You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by gi...@apache.org on 2018/08/02 19:51:47 UTC

[01/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site a44d79699 -> 7cf6034ba


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
index 88d8c36..996b13a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":42,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":42,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.56">MiniHBaseCluster</a>
+public class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.57">MiniHBaseCluster</a>
 extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></pre>
 <div class="block">This class creates a single process HBase cluster.
  each server.  The master uses the 'default' FileSystem.  The RegionServers,
@@ -463,38 +463,45 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 </tr>
 <tr id="i38" class="altColor">
 <td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+</td>
+</tr>
+<tr id="i39" class="rowColor">
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#killRegionServer-org.apache.hadoop.hbase.ServerName-">killRegionServer</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
  this causes the region server to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i39" class="rowColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#killZkNode-org.apache.hadoop.hbase.ServerName-">killZkNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i40" class="altColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#shutdown--">shutdown</a></span>()</code>
 <div class="block">Shut down the mini HBase cluster</div>
 </td>
 </tr>
-<tr id="i41" class="rowColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startDataNode-org.apache.hadoop.hbase.ServerName-">startDataNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i42" class="altColor">
+<tr id="i43" class="rowColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startMaster--">startMaster</a></span>()</code>
 <div class="block">Starts a master thread running</div>
 </td>
 </tr>
-<tr id="i43" class="rowColor">
+<tr id="i44" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startMaster-java.lang.String-int-">startMaster</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -502,13 +509,20 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
  starts a master locally.</div>
 </td>
 </tr>
-<tr id="i44" class="altColor">
+<tr id="i45" class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+</td>
+</tr>
+<tr id="i46" class="altColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServer--">startRegionServer</a></span>()</code>
 <div class="block">Starts a region server thread running</div>
 </td>
 </tr>
-<tr id="i45" class="rowColor">
+<tr id="i47" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServer-java.lang.String-int-">startRegionServer</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                  int&nbsp;port)</code>
@@ -516,13 +530,13 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
  starts a region server locally.</div>
 </td>
 </tr>
-<tr id="i46" class="altColor">
+<tr id="i48" class="altColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServerAndWait-long-">startRegionServerAndWait</a></span>(long&nbsp;timeout)</code>
 <div class="block">Starts a region server thread and waits until its processed by master.</div>
 </td>
 </tr>
-<tr id="i47" class="rowColor">
+<tr id="i49" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startZkNode-java.lang.String-int-">startZkNode</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -530,120 +544,140 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i48" class="altColor">
+<tr id="i50" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopDataNode-org.apache.hadoop.hbase.ServerName-">stopDataNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i49" class="rowColor">
+<tr id="i51" class="rowColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopMaster-int-">stopMaster</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Shut down the specified master cleanly</div>
 </td>
 </tr>
-<tr id="i50" class="altColor">
+<tr id="i52" class="altColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopMaster-int-boolean-">stopMaster</a></span>(int&nbsp;serverNumber,
           boolean&nbsp;shutdownFS)</code>
 <div class="block">Shut down the specified master cleanly</div>
 </td>
 </tr>
-<tr id="i51" class="rowColor">
+<tr id="i53" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopMaster-org.apache.hadoop.hbase.ServerName-">stopMaster</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i52" class="altColor">
+<tr id="i54" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+</td>
+</tr>
+<tr id="i55" class="rowColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-int-">stopRegionServer</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Shut down the specified region server cleanly</div>
 </td>
 </tr>
-<tr id="i53" class="rowColor">
+<tr id="i56" class="altColor">
 <td class="colFirst"><code>org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-int-boolean-">stopRegionServer</a></span>(int&nbsp;serverNumber,
                 boolean&nbsp;shutdownFS)</code>
 <div class="block">Shut down the specified region server cleanly</div>
 </td>
 </tr>
-<tr id="i54" class="altColor">
+<tr id="i57" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-org.apache.hadoop.hbase.ServerName-">stopRegionServer</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i55" class="rowColor">
+<tr id="i58" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopZkNode-org.apache.hadoop.hbase.ServerName-">stopZkNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i56" class="altColor">
+<tr id="i59" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForActiveAndReadyMaster-long-">waitForActiveAndReadyMaster</a></span>(long&nbsp;timeout)</code>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
 </td>
 </tr>
-<tr id="i57" class="rowColor">
+<tr id="i60" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForDataNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                       long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to join the cluster</div>
 </td>
 </tr>
-<tr id="i58" class="altColor">
+<tr id="i61" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForDataNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                      long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i59" class="rowColor">
+<tr id="i62" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">waitForMasterToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified master to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i60" class="altColor">
+<tr id="i63" class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                      long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+</td>
+</tr>
+<tr id="i64" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                     long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to stop</div>
+</td>
+</tr>
+<tr id="i65" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-">waitForRegionServerToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                          long&nbsp;timeout)</code>
 <div class="block">Wait for the specified region server to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i61" class="rowColor">
+<tr id="i66" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                     long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to join the cluster</div>
 </td>
 </tr>
-<tr id="i62" class="altColor">
+<tr id="i67" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForZkNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i63" class="rowColor">
+<tr id="i68" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitOnMaster-int-">waitOnMaster</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Wait for the specified master to stop.</div>
 </td>
 </tr>
-<tr id="i64" class="altColor">
+<tr id="i69" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitOnRegionServer-int-">waitOnRegionServer</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Wait for the specified region server to stop.</div>
 </td>
 </tr>
-<tr id="i65" class="rowColor">
+<tr id="i70" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitUntilShutDown--">waitUntilShutDown</a></span>()</code>
 <div class="block">Wait for HBase Cluster to shut down.</div>
@@ -684,7 +718,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.57">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.58">LOG</a></pre>
 </li>
 </ul>
 <a name="hbaseCluster">
@@ -693,7 +727,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>hbaseCluster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.LocalHBaseCluster <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.58">hbaseCluster</a></pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.LocalHBaseCluster <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.59">hbaseCluster</a></pre>
 </li>
 </ul>
 <a name="index">
@@ -702,7 +736,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockListLast">
 <li class="blockList">
 <h4>index</h4>
-<pre>private static&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.59">index</a></pre>
+<pre>private static&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.60">index</a></pre>
 </li>
 </ul>
 </li>
@@ -719,7 +753,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.67">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.68">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numRegionServers)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -740,7 +774,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.79">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.80">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numMasters,
                         int&nbsp;numRegionServers)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -763,7 +797,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.90">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.91">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numMasters,
                         int&nbsp;numRegionServers,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends org.apache.hadoop.hbase.master.HMaster&gt;&nbsp;masterClass,
@@ -788,7 +822,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.105">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.106">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numMasters,
                         int&nbsp;numRegionServers,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;rsPorts,
@@ -822,7 +856,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getConfiguration</h4>
-<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.119">getConfiguration</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.120">getConfiguration</a>()</pre>
 </li>
 </ul>
 <a name="init-int-int-java.util.List-java.lang.Class-java.lang.Class-">
@@ -831,7 +865,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>init</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.231">init</a>(int&nbsp;nMasterNodes,
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.232">init</a>(int&nbsp;nMasterNodes,
                   int&nbsp;nRegionNodes,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;rsPorts,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends org.apache.hadoop.hbase.master.HMaster&gt;&nbsp;masterClass,
@@ -851,7 +885,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.270">startRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.271">startRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                               int&nbsp;port)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startRegionServer-java.lang.String-int-">HBaseCluster</a></code></span></div>
@@ -873,7 +907,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>killRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.275">killRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.276">killRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killRegionServer-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
@@ -892,7 +926,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>isKilledRS</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.286">isKilledRS</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.287">isKilledRS</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#isKilledRS-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Keeping track of killed servers and being able to check if a particular server was killed makes
  it possible to do fault tolerance testing for dead servers in a deterministic way. A concrete
@@ -911,7 +945,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.291">stopRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.292">stopRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopRegionServer-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
@@ -929,7 +963,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForRegionServerToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.296">waitForRegionServerToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.297">waitForRegionServerToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                       long&nbsp;timeout)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -948,7 +982,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startZkNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.302">startZkNode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.303">startZkNode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                         int&nbsp;port)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startZkNode-java.lang.String-int-">HBaseCluster</a></code></span></div>
@@ -970,7 +1004,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>killZkNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.307">killZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.308">killZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killZkNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
@@ -989,7 +1023,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopZkNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.312">stopZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.313">stopZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopZkNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
@@ -1008,7 +1042,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForZkNodeToStart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.317">waitForZkNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.318">waitForZkNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                  long&nbsp;timeout)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -1027,7 +1061,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForZkNodeToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.322">waitForZkNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.323">waitForZkNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                 long&nbsp;timeout)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForZkNodeToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -1046,7 +1080,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startDataNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.327">startDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.328">startDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startDataNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
@@ -1065,7 +1099,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>killDataNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.332">killDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.333">killDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killDataNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Kills the datanode process if this is a distributed cluster, otherwise,
@@ -1084,7 +1118,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopDataNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.337">stopDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.338">stopDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopDataNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
@@ -1103,7 +1137,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForDataNodeToStart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.342">waitForDataNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.343">waitForDataNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                    long&nbsp;timeout)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForDataNodeToStart-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -1122,7 +1156,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForDataNodeToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.347">waitForDataNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.348">waitForDataNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                   long&nbsp;timeout)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForDataNodeToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -1135,13 +1169,107 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 </dl>
 </li>
 </ul>
+<a name="startNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>startNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.353">startNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="killNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>killNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.358">killNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="stopNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>stopNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.363">stopNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStart</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.368">waitForNameNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                                   long&nbsp;timeout)
+                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStop</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.373">waitForNameNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                                  long&nbsp;timeout)
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
+<div class="block">Wait for the specified namenode to stop</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
 <a name="startMaster-java.lang.String-int-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>startMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.352">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.378">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                         int&nbsp;port)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startMaster-java.lang.String-int-">HBaseCluster</a></code></span></div>
@@ -1163,7 +1291,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>killMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.357">killMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.383">killMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killMaster-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Kills the master process if this is a distributed cluster, otherwise,
@@ -1182,7 +1310,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.362">stopMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.388">stopMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopMaster-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
@@ -1200,7 +1328,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForMasterToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.367">waitForMasterToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.393">waitForMasterToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                 long&nbsp;timeout)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -1219,7 +1347,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.378">startRegionServer</a>()
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.404">startRegionServer</a>()
                                                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a region server thread running</div>
 <dl>
@@ -1236,7 +1364,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServerAndWait</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.402">startRegionServerAndWait</a>(long&nbsp;timeout)
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.428">startRegionServerAndWait</a>(long&nbsp;timeout)
                                                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a region server thread and waits until its processed by master. Throws an exception
  when it can't start a region server or when the region server is not processed by master
@@ -1255,7 +1383,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>abortRegionServer</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.427">abortRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.453">abortRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Cause a region server to exit doing basic clean up only on its way out.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1269,7 +1397,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.440">stopRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.466">stopRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Shut down the specified region server cleanly</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1285,7 +1413,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.454">stopRegionServer</a>(int&nbsp;serverNumber,
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.480">stopRegionServer</a>(int&nbsp;serverNumber,
                                                                                        boolean&nbsp;shutdownFS)</pre>
 <div class="block">Shut down the specified region server cleanly</div>
 <dl>
@@ -1306,7 +1434,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitOnRegionServer</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.469">waitOnRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.495">waitOnRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Wait for the specified region server to stop. Removes this thread from list
  of running threads.</div>
 <dl>
@@ -1323,7 +1451,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.479">startMaster</a>()
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.505">startMaster</a>()
                                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a master thread running</div>
 <dl>
@@ -1340,7 +1468,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterAdminService</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.499">getMasterAdminService</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.525">getMasterAdminService</a>()</pre>
 <div class="block">Returns the current active master, if available.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -1356,7 +1484,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.master.HMaster&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.507">getMaster</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.master.HMaster&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.533">getMaster</a>()</pre>
 <div class="block">Returns the current active master, if available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1370,7 +1498,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterThread</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.515">getMasterThread</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.541">getMasterThread</a>()</pre>
 <div class="block">Returns the current active master thread, if available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1384,7 +1512,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.master.HMaster&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.528">getMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.master.HMaster&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.554">getMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Returns the master at the specified index, if available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1398,7 +1526,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>abortMaster</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.536">abortMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.562">abortMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Cause a master to exit without shutting down entire cluster.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1412,7 +1540,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.549">stopMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.575">stopMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Shut down the specified master cleanly</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1428,7 +1556,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.563">stopMaster</a>(int&nbsp;serverNumber,
+<pre>public&nbsp;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.589">stopMaster</a>(int&nbsp;serverNumber,
                                                                            boolean&nbsp;shutdownFS)</pre>
 <div class="block">Shut down the specified master cleanly</div>
 <dl>
@@ -1449,7 +1577,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitOnMaster</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.578">waitOnMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.604">waitOnMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Wait for the specified master to stop. Removes this thread from list
  of running threads.</div>
 <dl>
@@ -1466,7 +1594,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForActiveAndReadyMaster</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.591">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.617">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
@@ -1490,7 +1618,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterThreads</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.610">getMasterThreads</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.636">getMasterThreads</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>List of master threads.</dd>
@@ -1503,7 +1631,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getLiveMasterThreads</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.617">getLiveMasterThreads</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.643">getLiveMasterThreads</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>List of live master threads (skips the aborted and the killed)</dd>
@@ -1516,7 +1644,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>join</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.624">join</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.650">join</a>()</pre>
 <div class="block">Wait for Mini HBase Cluster to shut down.</div>
 </li>
 </ul>
@@ -1526,7 +1654,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.632">shutdown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.658">shutdown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Shut down the mini HBase cluster</div>
 <dl>
@@ -1543,7 +1671,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.639">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.665">close</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#close--">HBaseCluster</a></code></span></div>
 <div class="block">Closes all the resources held open for this cluster. Note that this call does not shutdown
@@ -1569,7 +1697,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <li class="blockList">
 <h4>getClusterStatus</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.647">getClusterStatus</a>()
+public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.673">getClusterStatus</a>()
                                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0
              Use <a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#getClusterMetrics--"><code>getClusterMetrics()</code></a> instead.</span></div>
@@ -1585,7 +1713,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterMetrics</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.ClusterMetrics&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.653">getClusterMetrics</a>()
+<pre>public&nbsp;org.apache.hadoop.hbase.ClusterMetrics&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.679">getClusterMetrics</a>()
                                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#getClusterMetrics--">HBaseCluster</a></code></span></div>
 <div class="block">Returns a ClusterMetrics for this HBase cluster.</div>
@@ -1605,7 +1733,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>executeFlush</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.658">executeFlush</a>(org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.684">executeFlush</a>(org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1619,7 +1747,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>flushcache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.672">flushcache</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.698">flushcache</a>()
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Call flushCache on all regions on all participating regionservers.</div>
 <dl>
@@ -1634,7 +1762,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>flushcache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.683">flushcache</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.709">flushcache</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Call flushCache on all regions of the specified table.</div>
 <dl>
@@ -1649,7 +1777,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>compact</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.697">compact</a>(boolean&nbsp;major)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.723">compact</a>(boolean&nbsp;major)
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Call flushCache on all regions on all participating regionservers.</div>
 <dl>
@@ -1664,7 +1792,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>compact</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.710">compact</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.736">compact</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
                     boolean&nbsp;major)
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Call flushCache on all regions of the specified table.</div>
@@ -1680,7 +1808,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionServerThreads</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.725">getRegionServerThreads</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.751">getRegionServerThreads</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>List of region server threads. Does not return the master even though it is also
@@ -1694,7 +1822,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getLiveRegionServerThreads</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.732">getLiveRegionServerThreads</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.758">getLiveRegionServerThreads</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>List of live region server threads (skips the aborted and the killed)</dd>
@@ -1707,7 +1835,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionServer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.regionserver.HRegionServer&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.741">getRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.regionserver.HRegionServer&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.767">getRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Grab a numbered region server of your choice.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1723,7 +1851,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionServer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.regionserver.HRegionServer&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.745">getRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.regionserver.HRegionServer&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.771">getRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
 </li>
 </ul>
 <a name="getRegions-byte:A-">
@@ -1732,7 +1860,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegions</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.752">getRegions</a>(byte[]&nbsp;tableName)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.778">getRegions</a>(byte[]&nbsp;tableName)</pre>
 </li>
 </ul>
 <a name="getRegions-org.apache.hadoop.hbase.TableName-">
@@ -1741,7 +1869,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegions</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.756">getRegions</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.782">getRegions</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)</pre>
 </li>
 </ul>
 <a name="getServerWithMeta--">
@@ -1750,7 +1878,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerWithMeta</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.773">getServerWithMeta</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.799">getServerWithMeta</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Index into List of <a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#getRegionServerThreads--"><code>getRegionServerThreads()</code></a>
@@ -1764,7 +1892,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerWith</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.783">getServerWith</a>(byte[]&nbsp;regionName)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.809">getServerWith</a>(byte[]&nbsp;regionName)</pre>
 <div class="block">Get the location of the specified region</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1781,7 +1909,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerHoldingRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.801">getServerHoldingRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tn,
+<pre>public&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.827">getServerHoldingRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tn,
                                                                  byte[]&nbsp;regionName)
                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#getServerHoldingRegion-org.apache.hadoop.hbase.TableName-byte:A-">HBaseCluster</a></code></span></div>
@@ -1805,7 +1933,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>countServedRegions</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.825">countServedRegions</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.851">countServedRegions</a>()</pre>
 <div class="block">Counts the total numbers of regions being served by the currently online
  region servers by asking each how many regions they have.  Does not look
  at hbase:meta at all.  Count includes catalog tables.</div>
@@ -1821,7 +1949,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>killAll</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.840">killAll</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.866">killAll</a>()</pre>
 <div class="block">Do a simulated kill all masters and regionservers. Useful when it is
  impossible to bring the mini-cluster back for clean shutdown.</div>
 </li>
@@ -1832,7 +1960,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>waitUntilShutDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.860">waitUntilShutDown</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.886">waitUntilShutDown</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitUntilShutDown--">HBaseCluster</a></code></span></div>
 <div class="block">Wait for HBase Cluster to shut down.</div>
 <dl>
@@ -1847,7 +1975,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>findRegionsForTable</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.864">findRegionsForTable</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.890">findRegionsForTable</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)</pre>
 </li>
 </ul>
 <a name="getRegionServerIndex-org.apache.hadoop.hbase.ServerName-">
@@ -1856,7 +1984,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionServerIndex</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.878">getRegionServerIndex</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.904">getRegionServerIndex</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
 </li>
 </ul>
 <a name="getMasterIndex-org.apache.hadoop.hbase.ServerName-">
@@ -1865,7 +1993,7 @@ public&nbsp;org.apache.hadoop.hbase.ClusterStatus&nbsp;<a href="../../../../src-
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterIndex</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../

<TRUNCATED>

[29/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">

<TRUNCATED>

[46/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index f06b2c1..3212827 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.94">BackupSystemTable</a>
+public final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.95">BackupSystemTable</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></pre>
 <div class="block">This class provides API to access backup system table<br>
@@ -973,7 +973,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.96">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.97">LOG</a></pre>
 </li>
 </ul>
 <a name="tableName">
@@ -982,7 +982,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>tableName</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.130">tableName</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.131">tableName</a></pre>
 <div class="block">Backup system table (main) name</div>
 </li>
 </ul>
@@ -992,7 +992,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoadTableName</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.138">bulkLoadTableName</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.139">bulkLoadTableName</a></pre>
 <div class="block">Backup System table name for bulk loaded files. We keep all bulk loaded file references in a
  separate table because we have to isolate general backup operations: create, merge etc from
  activity of RegionObserver, which controls process of a bulk loading
@@ -1005,7 +1005,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>SESSIONS_FAMILY</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.143">SESSIONS_FAMILY</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.144">SESSIONS_FAMILY</a></pre>
 <div class="block">Stores backup sessions (contexts)</div>
 </li>
 </ul>
@@ -1015,7 +1015,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>META_FAMILY</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.147">META_FAMILY</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.148">META_FAMILY</a></pre>
 <div class="block">Stores other meta</div>
 </li>
 </ul>
@@ -1025,7 +1025,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BULK_LOAD_FAMILY</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.148">BULK_LOAD_FAMILY</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.149">BULK_LOAD_FAMILY</a></pre>
 </li>
 </ul>
 <a name="connection">
@@ -1034,7 +1034,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>connection</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.152">connection</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.153">connection</a></pre>
 <div class="block">Connection to HBase cluster, shared among all instances</div>
 </li>
 </ul>
@@ -1044,7 +1044,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BACKUP_INFO_PREFIX</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.154">BACKUP_INFO_PREFIX</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.155">BACKUP_INFO_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.BACKUP_INFO_PREFIX">Constant Field Values</a></dd>
@@ -1057,7 +1057,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>START_CODE_ROW</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.155">START_CODE_ROW</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.156">START_CODE_ROW</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.START_CODE_ROW">Constant Field Values</a></dd>
@@ -1070,7 +1070,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>ACTIVE_SESSION_ROW</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.156">ACTIVE_SESSION_ROW</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.157">ACTIVE_SESSION_ROW</a></pre>
 </li>
 </ul>
 <a name="ACTIVE_SESSION_COL">
@@ -1079,7 +1079,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>ACTIVE_SESSION_COL</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.157">ACTIVE_SESSION_COL</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.158">ACTIVE_SESSION_COL</a></pre>
 </li>
 </ul>
 <a name="ACTIVE_SESSION_YES">
@@ -1088,7 +1088,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>ACTIVE_SESSION_YES</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.159">ACTIVE_SESSION_YES</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.160">ACTIVE_SESSION_YES</a></pre>
 </li>
 </ul>
 <a name="ACTIVE_SESSION_NO">
@@ -1097,7 +1097,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>ACTIVE_SESSION_NO</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.160">ACTIVE_SESSION_NO</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.161">ACTIVE_SESSION_NO</a></pre>
 </li>
 </ul>
 <a name="INCR_BACKUP_SET">
@@ -1106,7 +1106,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>INCR_BACKUP_SET</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.162">INCR_BACKUP_SET</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.163">INCR_BACKUP_SET</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.INCR_BACKUP_SET">Constant Field Values</a></dd>
@@ -1119,7 +1119,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_RS_LOG_MAP_PREFIX</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.163">TABLE_RS_LOG_MAP_PREFIX</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.164">TABLE_RS_LOG_MAP_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.TABLE_RS_LOG_MAP_PREFIX">Constant Field Values</a></dd>
@@ -1132,7 +1132,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>RS_LOG_TS_PREFIX</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.164">RS_LOG_TS_PREFIX</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.165">RS_LOG_TS_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.RS_LOG_TS_PREFIX">Constant Field Values</a></dd>
@@ -1145,7 +1145,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BULK_LOAD_PREFIX</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.166">BULK_LOAD_PREFIX</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.167">BULK_LOAD_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.BULK_LOAD_PREFIX">Constant Field Values</a></dd>
@@ -1158,7 +1158,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BULK_LOAD_PREFIX_BYTES</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.167">BULK_LOAD_PREFIX_BYTES</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.168">BULK_LOAD_PREFIX_BYTES</a></pre>
 </li>
 </ul>
 <a name="DELETE_OP_ROW">
@@ -1167,7 +1167,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DELETE_OP_ROW</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.168">DELETE_OP_ROW</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.169">DELETE_OP_ROW</a></pre>
 </li>
 </ul>
 <a name="MERGE_OP_ROW">
@@ -1176,7 +1176,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>MERGE_OP_ROW</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.169">MERGE_OP_ROW</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.170">MERGE_OP_ROW</a></pre>
 </li>
 </ul>
 <a name="TBL_COL">
@@ -1185,7 +1185,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>TBL_COL</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.171">TBL_COL</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.172">TBL_COL</a></pre>
 </li>
 </ul>
 <a name="FAM_COL">
@@ -1194,7 +1194,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>FAM_COL</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.172">FAM_COL</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.173">FAM_COL</a></pre>
 </li>
 </ul>
 <a name="PATH_COL">
@@ -1203,7 +1203,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>PATH_COL</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.173">PATH_COL</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.174">PATH_COL</a></pre>
 </li>
 </ul>
 <a name="STATE_COL">
@@ -1212,7 +1212,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>STATE_COL</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.174">STATE_COL</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.175">STATE_COL</a></pre>
 </li>
 </ul>
 <a name="BL_PREPARE">
@@ -1221,7 +1221,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BL_PREPARE</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.176">BL_PREPARE</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.177">BL_PREPARE</a></pre>
 </li>
 </ul>
 <a name="BL_COMMIT">
@@ -1230,7 +1230,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BL_COMMIT</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.177">BL_COMMIT</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.178">BL_COMMIT</a></pre>
 </li>
 </ul>
 <a name="WALS_PREFIX">
@@ -1239,7 +1239,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>WALS_PREFIX</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.179">WALS_PREFIX</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.180">WALS_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.WALS_PREFIX">Constant Field Values</a></dd>
@@ -1252,7 +1252,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>SET_KEY_PREFIX</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.180">SET_KEY_PREFIX</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.181">SET_KEY_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.SET_KEY_PREFIX">Constant Field Values</a></dd>
@@ -1265,7 +1265,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>BLK_LD_DELIM</h4>
-<pre>protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.183">BLK_LD_DELIM</a></pre>
+<pre>protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.184">BLK_LD_DELIM</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.BLK_LD_DELIM">Constant Field Values</a></dd>
@@ -1278,7 +1278,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>EMPTY_VALUE</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.184">EMPTY_VALUE</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.185">EMPTY_VALUE</a></pre>
 </li>
 </ul>
 <a name="NULL">
@@ -1287,7 +1287,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>NULL</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.187">NULL</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.188">NULL</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.NULL">Constant Field Values</a></dd>
@@ -1308,7 +1308,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BackupSystemTable</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.189">BackupSystemTable</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn)
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.190">BackupSystemTable</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1330,7 +1330,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkSystemTable</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.197">checkSystemTable</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.198">checkSystemTable</a>()
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1344,7 +1344,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyNamespaceExists</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.214">verifyNamespaceExists</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;admin)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.215">verifyNamespaceExists</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;admin)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1358,7 +1358,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForSystemTable</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.230">waitForSystemTable</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;admin,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.231">waitForSystemTable</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;admin,
                                 <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1373,7 +1373,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.247">close</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.248">close</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true#close--" title="class or interface in java.io">close</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></code></dd>
@@ -1388,7 +1388,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>updateBackupInfo</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.256">updateBackupInfo</a>(<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&nbsp;info)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.257">updateBackupInfo</a>(<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&nbsp;info)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Updates status (state) of a backup session in backup system table table</div>
 <dl>
@@ -1405,7 +1405,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readBulkLoadedFiles</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.271">readBulkLoadedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.272">readBulkLoadedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1419,7 +1419,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readBulkLoadedFiles</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.297">readBulkLoadedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.298">readBulkLoadedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId,
                                                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;sTableList)
                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1434,7 +1434,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteBackupInfo</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.351">deleteBackupInfo</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.352">deleteBackupInfo</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Deletes backup status from backup system table table</div>
 <dl>
@@ -1451,7 +1451,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>writePathsPostBulkLoad</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.367">writePathsPostBulkLoad</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tabName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.368">writePathsPostBulkLoad</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tabName,
                                    byte[]&nbsp;region,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;&nbsp;finalPaths)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1467,7 +1467,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>writeFilesForBulkLoadPreCommit</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.387">writeFilesForBulkLoadPreCommit</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tabName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.388">writeFilesForBulkLoadPreCommit</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tabName,
                                            byte[]&nbsp;region,
                                            byte[]&nbsp;family,
                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path&gt;&gt;&nbsp;pairs)
@@ -1484,7 +1484,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteBulkLoadedRows</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.406">deleteBulkLoadedRows</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt;&nbsp;rows)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.407">deleteBulkLoadedRows</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt;&nbsp;rows)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1498,7 +1498,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readBulkloadRows</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com
 /javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;&gt;&gt;&gt;&gt;,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.426">readBulkloadRows</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../or
 g/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tableList)
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com
 /javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;&gt;&gt;&gt;&gt;,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.427">readBulkloadRows</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../or
 g/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tableList)
                                                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1512,7 +1512,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>writeBulkLoadedFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.488">writeBulkLoadedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;sTableList,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.489">writeBulkLoadedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;sTableList,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;[]&nbsp;maps,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1528,7 +1528,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readBackupInfo</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.523">readBackupInfo</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.524">readBackupInfo</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Reads backup status object (instance of backup info) from backup system table table</div>
 <dl>
@@ -1547,7 +1547,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readBackupStartCode</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.546">readBackupStartCode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.547">readBackupStartCode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Read the last backup start code (timestamp) of last successful backup. Will return null if
  there is no start code stored on hbase or the value is of length 0. These two cases indicate
@@ -1568,7 +1568,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>writeBackupStartCode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.570">writeBackupStartCode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;startCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.571">writeBackupStartCode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;startCode,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Write the start code (timestamp) to backup system table. If passed in null, then write 0 byte.</div>
@@ -1587,7 +1587,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>startBackupExclusiveOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.585">startBackupExclusiveOperation</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.586">startBackupExclusiveOperation</a>()
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Exclusive operations are: create, delete, merge</div>
 <dl>
@@ -1603,7 +1603,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>createPutForStartBackupSession</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.602">createPutForStartBackupSession</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.603">createPutForStartBackupSession</a>()</pre>
 </li>
 </ul>
 <a name="finishBackupExclusiveOperation--">
@@ -1612,7 +1612,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>finishBackupExclusiveOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.608">finishBackupExclusiveOperation</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.609">finishBackupExclusiveOperation</a>()
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1626,7 +1626,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>createPutForStopBackupSession</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.620">createPutForStopBackupSession</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.621">createPutForStopBackupSession</a>()</pre>
 </li>
 </ul>
 <a name="readRegionServerLastLogRollResult-java.lang.String-">
@@ -1635,7 +1635,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readRegionServerLastLogRollResult</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.632">readRegionServerLastLogRollResult</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.633">readRegionServerLastLogRollResult</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the Region Servers log information after the last log roll from backup system table.</div>
 <dl>
@@ -1654,7 +1654,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>writeRegionServerLastLogRollResult</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.661">writeRegionServerLastLogRollResult</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;server,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.662">writeRegionServerLastLogRollResult</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;server,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;ts,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1675,7 +1675,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupHistory</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.677">getBackupHistory</a>(boolean&nbsp;onlyCompleted)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.678">getBackupHistory</a>(boolean&nbsp;onlyCompleted)
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get all completed backup information (in desc order by time)</div>
 <dl>
@@ -1694,7 +1694,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupHistory</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.690">getBackupHistory</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.691">getBackupHistory</a>()
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get all backups history</div>
 <dl>
@@ -1711,7 +1711,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getHistory</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.700">getHistory</a>(int&nbsp;n)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.701">getHistory</a>(int&nbsp;n)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get first n backup history records</div>
 <dl>
@@ -1730,7 +1730,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupHistory</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.715">getBackupHistory</a>(int&nbsp;n,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.716">getBackupHistory</a>(int&nbsp;n,
                                          <a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.Filter.html" title="interface in org.apache.hadoop.hbase.backup">BackupInfo.Filter</a>...&nbsp;filters)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get backup history records filtered by list of filters.</div>
@@ -1751,7 +1751,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getTablesForBackupType</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.746">getTablesForBackupType</a>(<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup">BackupType</a>&nbsp;type)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.747">getTablesForBackupType</a>(<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup">BackupType</a>&nbsp;type)
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1765,7 +1765,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupHistory</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.763">getBackupHistory</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.764">getBackupHistory</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get history for backup destination</div>
 <dl>
@@ -1784,7 +1784,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupHistoryForTable</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.780">getBackupHistoryForTable</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.781">getBackupHistoryForTable</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get history for a table</div>
 <dl>
@@ -1803,7 +1803,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupHistoryForTableSet</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.792">getBackupHistoryForTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;set,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.793">getBackupHistoryForTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;set,
                                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1818,7 +1818,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupInfos</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.822">getBackupInfos</a>(<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup">BackupInfo.BackupState</a>&nbsp;state)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.823">getBackupInfos</a>(<a href="../../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup">BackupInfo.BackupState</a>&nbsp;state)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get all backup sessions with a given state (in descending order by time)</div>
 <dl>
@@ -1837,7 +1837,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>writeRegionServerLogTimestamp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.852">writeRegionServerLogTimestamp</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tables,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.853">writeRegionServerLogTimestamp</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tables,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;newTimestamps,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1860,7 +1860,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>readLogTimestampMap</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.878">readLogTimestampMap</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&
 nbsp;backupRoot)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.879">readLogTimestampMap</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&
 nbsp;backupRoot)
                                                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Read the timestamp for each region server log after the last successful backup. Each table has
  its own set of the timestamps. The info is stored for each table as a concatenated string of
@@ -1882,7 +1882,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>toTableServerTimestampProto</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.911">toTableServerTimestampProto</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.912">toTableServerTimestampProto</a>(<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                                                                                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;map)</pre>
 </li>
 </ul>
@@ -1892,7 +1892,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fromTableServerTimestampProto</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.933">fromTableServerTimestampProto</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp&nbsp;proto)</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.934">fromTableServerTimestampProto</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp&nbsp;proto)</pre>
 </li>
 </ul>
 <a name="getIncrementalBackupTableSet-java.lang.String-">
@@ -1901,7 +1901,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getIncrementalBackupTableSet</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.951">getIncrementalBackupTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.952">getIncrementalBackupTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Return the current tables covered by incremental backup.</div>
 <dl>
@@ -1920,7 +1920,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>addIncrementalBackupTableSet</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.977">addIncrementalBackupTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tables,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.978">addIncrementalBackupTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tables,
                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Add tables to global incremental backup set</div>
@@ -1939,7 +1939,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteIncrementalBackupTableSet</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.996">deleteIncrementalBackupTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.997">deleteIncrementalBackupTableSet</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Deletes incremental backup set for a backup destination</div>
 <dl>
@@ -1956,7 +1956,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>addWALFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1013">addWALFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;files,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1014">addWALFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;files,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1977,7 +1977,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getWALFilesIterator</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1033">getWALFilesIterator</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1034">getWALFilesIterator</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)
                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Register WAL files as eligible for deletion</div>
 <dl>
@@ -1994,7 +1994,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>isWALFileDeletable</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1091">isWALFileDeletable</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;file)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1092">isWALFileDeletable</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;file)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check if WAL file is eligible for deletion Future: to support all backup destinations</div>
 <dl>
@@ -2013,7 +2013,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>areWALFilesDeletable</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.FileStatus,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1109">areWALFilesDeletable</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;files)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.FileStatus,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1110">areWALFilesDeletable</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;files)
                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check if WAL file is eligible for deletion using multi-get</div>
 <dl>
@@ -2033,7 +2033,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>hasBackupSessions</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1149">hasBackupSessions</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.1155">hasBackupSessions</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks if we have at least one backu

<TRUNCATED>

[28/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">

<TRUNCATED>

[25/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</span>  /**<a name="line.503"><

<TRUNCATED>

[33/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 1e0659a..981ebcd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -73,1969 +73,1975 @@
 <span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Table;<a name="line.65"></a>
 <span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.66"></a>
 <span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.slf4j.Logger;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.slf4j.LoggerFactory;<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>/**<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * This class provides API to access backup system table&lt;br&gt;<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * Backup system table schema:&lt;br&gt;<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * &lt;p&gt;<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * &lt;ul&gt;<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * &lt;li&gt;1. Backup sessions rowkey= "session:"+backupId; value =serialized BackupInfo&lt;/li&gt;<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * &lt;li&gt;2. Backup start code rowkey = "startcode:"+backupRoot; value = startcode&lt;/li&gt;<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * &lt;li&gt;3. Incremental backup set rowkey="incrbackupset:"+backupRoot; value=[list of tables]&lt;/li&gt;<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * &lt;li&gt;4. Table-RS-timestamp map rowkey="trslm:"+backupRoot+table_name; value = map[RS-&gt; last WAL<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * timestamp]&lt;/li&gt;<a name="line.86"></a>
-<span class="sourceLineNo">087</span> * &lt;li&gt;5. RS - WAL ts map rowkey="rslogts:"+backupRoot +server; value = last WAL timestamp&lt;/li&gt;<a name="line.87"></a>
-<span class="sourceLineNo">088</span> * &lt;li&gt;6. WALs recorded rowkey="wals:"+WAL unique file name; value = backupId and full WAL file<a name="line.88"></a>
-<span class="sourceLineNo">089</span> * name&lt;/li&gt;<a name="line.89"></a>
-<span class="sourceLineNo">090</span> * &lt;/ul&gt;<a name="line.90"></a>
-<span class="sourceLineNo">091</span> * &lt;/p&gt;<a name="line.91"></a>
-<span class="sourceLineNo">092</span> */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>@InterfaceAudience.Private<a name="line.93"></a>
-<span class="sourceLineNo">094</span>public final class BackupSystemTable implements Closeable {<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  private static final Logger LOG = LoggerFactory.getLogger(BackupSystemTable.class);<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  static class WALItem {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    String backupId;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    String walFile;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    String backupRoot;<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>    WALItem(String backupId, String walFile, String backupRoot) {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>      this.backupId = backupId;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      this.walFile = walFile;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      this.backupRoot = backupRoot;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>    public String getBackupId() {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      return backupId;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>    public String getWalFile() {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      return walFile;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>    public String getBackupRoot() {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      return backupRoot;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    @Override<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    public String toString() {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      return Path.SEPARATOR + backupRoot + Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /**<a name="line.127"></a>
-<span class="sourceLineNo">128</span>   * Backup system table (main) name<a name="line.128"></a>
-<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  private TableName tableName;<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * Backup System table name for bulk loaded files. We keep all bulk loaded file references in a<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * separate table because we have to isolate general backup operations: create, merge etc from<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * activity of RegionObserver, which controls process of a bulk loading<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * {@link org.apache.hadoop.hbase.backup.BackupObserver}<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  private TableName bulkLoadTableName;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
-<span class="sourceLineNo">141</span>   * Stores backup sessions (contexts)<a name="line.141"></a>
-<span class="sourceLineNo">142</span>   */<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  final static byte[] SESSIONS_FAMILY = "session".getBytes();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  /**<a name="line.144"></a>
-<span class="sourceLineNo">145</span>   * Stores other meta<a name="line.145"></a>
-<span class="sourceLineNo">146</span>   */<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  final static byte[] META_FAMILY = "meta".getBytes();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  final static byte[] BULK_LOAD_FAMILY = "bulk".getBytes();<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  /**<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   * Connection to HBase cluster, shared among all instances<a name="line.150"></a>
-<span class="sourceLineNo">151</span>   */<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  private final Connection connection;<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  private final static String BACKUP_INFO_PREFIX = "session:";<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  private final static String START_CODE_ROW = "startcode:";<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  private final static byte[] ACTIVE_SESSION_ROW = "activesession:".getBytes();<a name="line.156"></a>
-<span class="sourceLineNo">157</span>  private final static byte[] ACTIVE_SESSION_COL = "c".getBytes();<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>  private final static byte[] ACTIVE_SESSION_YES = "yes".getBytes();<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  private final static byte[] ACTIVE_SESSION_NO = "no".getBytes();<a name="line.160"></a>
-<span class="sourceLineNo">161</span><a name="line.161"></a>
-<span class="sourceLineNo">162</span>  private final static String INCR_BACKUP_SET = "incrbackupset:";<a name="line.162"></a>
-<span class="sourceLineNo">163</span>  private final static String TABLE_RS_LOG_MAP_PREFIX = "trslm:";<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  private final static String RS_LOG_TS_PREFIX = "rslogts:";<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  private final static String BULK_LOAD_PREFIX = "bulk:";<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private final static byte[] BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private final static byte[] DELETE_OP_ROW = "delete_op_row".getBytes();<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final static byte[] MERGE_OP_ROW = "merge_op_row".getBytes();<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>  final static byte[] TBL_COL = Bytes.toBytes("tbl");<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  final static byte[] FAM_COL = Bytes.toBytes("fam");<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  final static byte[] PATH_COL = Bytes.toBytes("path");<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  final static byte[] STATE_COL = Bytes.toBytes("state");<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  // the two states a bulk loaded file can be<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  final static byte[] BL_PREPARE = Bytes.toBytes("R");<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  final static byte[] BL_COMMIT = Bytes.toBytes("D");<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private final static String WALS_PREFIX = "wals:";<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  private final static String SET_KEY_PREFIX = "backupset:";<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>  // separator between BULK_LOAD_PREFIX and ordinals<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  protected final static String BLK_LD_DELIM = ":";<a name="line.183"></a>
-<span class="sourceLineNo">184</span>  private final static byte[] EMPTY_VALUE = new byte[] {};<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  // Safe delimiter in a string<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private final static String NULL = "\u0000";<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  public BackupSystemTable(Connection conn) throws IOException {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    this.connection = conn;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    Configuration conf = this.connection.getConfiguration();<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    tableName = BackupSystemTable.getTableName(conf);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    bulkLoadTableName = BackupSystemTable.getTableNameForBulkLoadedData(conf);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    checkSystemTable();<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private void checkSystemTable() throws IOException {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    try (Admin admin = connection.getAdmin()) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      verifyNamespaceExists(admin);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      Configuration conf = connection.getConfiguration();<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      if (!admin.tableExists(tableName)) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        TableDescriptor backupHTD = BackupSystemTable.getSystemTableDescriptor(conf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>        admin.createTable(backupHTD);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      }<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      if (!admin.tableExists(bulkLoadTableName)) {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        TableDescriptor blHTD = BackupSystemTable.getSystemTableForBulkLoadedDataDescriptor(conf);<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        admin.createTable(blHTD);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      waitForSystemTable(admin, tableName);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      waitForSystemTable(admin, bulkLoadTableName);<a name="line.210"></a>
-<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
-<span class="sourceLineNo">212</span>  }<a name="line.212"></a>
-<span class="sourceLineNo">213</span><a name="line.213"></a>
-<span class="sourceLineNo">214</span>  private void verifyNamespaceExists(Admin admin) throws IOException {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    String namespaceName = tableName.getNamespaceAsString();<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    NamespaceDescriptor ns = NamespaceDescriptor.create(namespaceName).build();<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    NamespaceDescriptor[] list = admin.listNamespaceDescriptors();<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    boolean exists = false;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    for (NamespaceDescriptor nsd : list) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      if (nsd.getName().equals(ns.getName())) {<a name="line.220"></a>
-<span class="sourceLineNo">221</span>        exists = true;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        break;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    if (!exists) {<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      admin.createNamespace(ns);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  }<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private void waitForSystemTable(Admin admin, TableName tableName) throws IOException {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    long TIMEOUT = 60000;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    long startTime = EnvironmentEdgeManager.currentTime();<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    while (!admin.tableExists(tableName) || !admin.isTableAvailable(tableName)) {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      try {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        Thread.sleep(100);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      } catch (InterruptedException e) {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      if (EnvironmentEdgeManager.currentTime() - startTime &gt; TIMEOUT) {<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        throw new IOException(<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          "Failed to create backup system table " + tableName + " after " + TIMEOUT + "ms");<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      }<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    }<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    LOG.debug("Backup table " + tableName + " exists and available");<a name="line.243"></a>
-<span class="sourceLineNo">244</span>  }<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>  @Override<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public void close() {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    // do nothing<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  /**<a name="line.251"></a>
-<span class="sourceLineNo">252</span>   * Updates status (state) of a backup session in backup system table table<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * @param info backup info<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @throws IOException exception<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  public void updateBackupInfo(BackupInfo info) throws IOException {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    if (LOG.isTraceEnabled()) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      LOG.trace("update backup status in backup system table for: " + info.getBackupId()<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        + " set status=" + info.getState());<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    try (Table table = connection.getTable(tableName)) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      Put put = createPutForBackupInfo(info);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      table.put(put);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span>  }<a name="line.265"></a>
-<span class="sourceLineNo">266</span><a name="line.266"></a>
-<span class="sourceLineNo">267</span>  /*<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * @param backupId the backup Id<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * @return Map of rows to path of bulk loaded hfile<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   */<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  Map&lt;byte[], String&gt; readBulkLoadedFiles(String backupId) throws IOException {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    Scan scan = BackupSystemTable.createScanForBulkLoadedFiles(backupId);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try (Table table = connection.getTable(bulkLoadTableName);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      Result res = null;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      Map&lt;byte[], String&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      while ((res = scanner.next()) != null) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        res.advance();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>        byte[] row = CellUtil.cloneRow(res.listCells().get(0));<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        for (Cell cell : res.listCells()) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>          if (CellUtil.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0,<a name="line.281"></a>
-<span class="sourceLineNo">282</span>            BackupSystemTable.PATH_COL.length) == 0) {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>            map.put(row, Bytes.toString(CellUtil.cloneValue(cell)));<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          }<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        }<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      }<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      return map;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  }<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  /*<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * Used during restore<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * @param backupId the backup Id<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @param sTableList List of tables<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * @return array of Map of family to List of Paths<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   */<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  public Map&lt;byte[], List&lt;Path&gt;&gt;[] readBulkLoadedFiles(String backupId, List&lt;TableName&gt; sTableList)<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      throws IOException {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    Scan scan = BackupSystemTable.createScanForBulkLoadedFiles(backupId);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Map&lt;byte[], List&lt;Path&gt;&gt;[] mapForSrc = new Map[sTableList == null ? 1 : sTableList.size()];<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    try (Table table = connection.getTable(bulkLoadTableName);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      Result res = null;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      while ((res = scanner.next()) != null) {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        res.advance();<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        TableName tbl = null;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        byte[] fam = null;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>        String path = null;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        for (Cell cell : res.listCells()) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (CellUtil.compareQualifiers(cell, BackupSystemTable.TBL_COL, 0,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>            BackupSystemTable.TBL_COL.length) == 0) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            tbl = TableName.valueOf(CellUtil.cloneValue(cell));<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>            BackupSystemTable.FAM_COL.length) == 0) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            fam = CellUtil.cloneValue(cell);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>          } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0,<a name="line.316"></a>
-<span class="sourceLineNo">317</span>            BackupSystemTable.PATH_COL.length) == 0) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>            path = Bytes.toString(CellUtil.cloneValue(cell));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>          }<a name="line.319"></a>
-<span class="sourceLineNo">320</span>        }<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        int srcIdx = IncrementalTableBackupClient.getIndex(tbl, sTableList);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>        if (srcIdx == -1) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          // the table is not among the query<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          continue;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        }<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        if (mapForSrc[srcIdx] == null) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>          mapForSrc[srcIdx] = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        List&lt;Path&gt; files;<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        if (!mapForSrc[srcIdx].containsKey(fam)) {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>          files = new ArrayList&lt;Path&gt;();<a name="line.331"></a>
-<span class="sourceLineNo">332</span>          mapForSrc[srcIdx].put(fam, files);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        } else {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          files = mapForSrc[srcIdx].get(fam);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>        }<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        files.add(new Path(path));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (LOG.isDebugEnabled()) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          LOG.debug("found bulk loaded file : " + tbl + " " + Bytes.toString(fam) + " " + path);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>      return mapForSrc;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Deletes backup status from backup system table table<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * @param backupId backup id<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * @throws IOException exception<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  public void deleteBackupInfo(String backupId) throws IOException {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    if (LOG.isTraceEnabled()) {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      LOG.trace("delete backup status in backup system table for " + backupId);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    }<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    try (Table table = connection.getTable(tableName)) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      Delete del = createDeleteForBackupInfo(backupId);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      table.delete(del);<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /*<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * For postBulkLoadHFile() hook.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param tabName table name<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @param region the region receiving hfile<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @param finalPaths family and associated hfiles<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public void writePathsPostBulkLoad(TableName tabName, byte[] region,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      Map&lt;byte[], List&lt;Path&gt;&gt; finalPaths) throws IOException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    if (LOG.isDebugEnabled()) {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      LOG.debug("write bulk load descriptor to backup " + tabName + " with " + finalPaths.size()<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        + " entries");<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      List&lt;Put&gt; puts = BackupSystemTable.createPutForCommittedBulkload(tabName, region, finalPaths);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      table.put(puts);<a name="line.375"></a>
-<span class="sourceLineNo">376</span>      LOG.debug("written " + puts.size() + " rows for bulk load of " + tabName);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    }<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  }<a name="line.378"></a>
-<span class="sourceLineNo">379</span><a name="line.379"></a>
-<span class="sourceLineNo">380</span>  /*<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   * For preCommitStoreFile() hook<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @param tabName table name<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param region the region receiving hfile<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param family column family<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @param pairs list of paths for hfiles<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  public void writeFilesForBulkLoadPreCommit(TableName tabName, byte[] region, final byte[] family,<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      final List&lt;Pair&lt;Path, Path&gt;&gt; pairs) throws IOException {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    if (LOG.isDebugEnabled()) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      LOG.debug(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        "write bulk load descriptor to backup " + tabName + " with " + pairs.size() + " entries");<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      List&lt;Put&gt; puts =<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          BackupSystemTable.createPutForPreparedBulkload(tabName, region, family, pairs);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>      table.put(puts);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>      LOG.debug("written " + puts.size() + " rows for bulk load of " + tabName);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    }<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /*<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Removes rows recording bulk loaded hfiles from backup table<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * @param lst list of table names<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * @param rows the rows to be deleted<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   */<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  public void deleteBulkLoadedRows(List&lt;byte[]&gt; rows) throws IOException {<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      List&lt;Delete&gt; lstDels = new ArrayList&lt;&gt;();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      for (byte[] row : rows) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        Delete del = new Delete(row);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        lstDels.add(del);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        LOG.debug("orig deleting the row: " + Bytes.toString(row));<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      }<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      table.delete(lstDels);<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      LOG.debug("deleted " + rows.size() + " original bulkload rows");<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    }<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  }<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  /*<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * Reads the rows from backup table recording bulk loaded hfiles<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param tableList list of table names<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return The keys of the Map are table, region and column family. Value of the map reflects<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   * whether the hfile was recorded by preCommitStoreFile hook (true)<a name="line.423"></a>
-<span class="sourceLineNo">424</span>   */<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  public Pair&lt;Map&lt;TableName, Map&lt;String, Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt;&gt;&gt;, List&lt;byte[]&gt;&gt;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    readBulkloadRows(List&lt;TableName&gt; tableList) throws IOException {<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    Map&lt;TableName, Map&lt;String, Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt;&gt;&gt; map = new HashMap&lt;&gt;();<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    List&lt;byte[]&gt; rows = new ArrayList&lt;&gt;();<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    for (TableName tTable : tableList) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Scan scan = BackupSystemTable.createScanForOrigBulkLoadedFiles(tTable);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      Map&lt;String, Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt;&gt; tblMap = map.get(tTable);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      try (Table table = connection.getTable(bulkLoadTableName);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>          ResultScanner scanner = table.getScanner(scan)) {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>        Result res = null;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        while ((res = scanner.next()) != null) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>          res.advance();<a name="line.437"></a>
-<span class="sourceLineNo">438</span>          String fam = null;<a name="line.438"></a>
-<span class="sourceLineNo">439</span>          String path = null;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>          boolean raw = false;<a name="line.440"></a>
-<span class="sourceLineNo">441</span>          byte[] row;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>          String region = null;<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          for (Cell cell : res.listCells()) {<a name="line.443"></a>
-<span class="sourceLineNo">444</span>            row = CellUtil.cloneRow(cell);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>            rows.add(row);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            String rowStr = Bytes.toString(row);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>            region = BackupSystemTable.getRegionNameFromOrigBulkLoadRow(rowStr);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            if (CellUtil.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>              BackupSystemTable.FAM_COL.length) == 0) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              fam = Bytes.toString(CellUtil.cloneValue(cell));<a name="line.450"></a>
-<span class="sourceLineNo">451</span>            } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0,<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              BackupSystemTable.PATH_COL.length) == 0) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>              path = Bytes.toString(CellUtil.cloneValue(cell));<a name="line.453"></a>
-<span class="sourceLineNo">454</span>            } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.STATE_COL, 0,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>              BackupSystemTable.STATE_COL.length) == 0) {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>              byte[] state = CellUtil.cloneValue(cell);<a name="line.456"></a>
-<span class="sourceLineNo">457</span>              if (Bytes.equals(BackupSystemTable.BL_PREPARE, state)) {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>                raw = true;<a name="line.458"></a>
-<span class="sourceLineNo">459</span>              } else {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>                raw = false;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>              }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>            }<a name="line.462"></a>
-<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>          if (map.get(tTable) == null) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            map.put(tTable, new HashMap&lt;&gt;());<a name="line.465"></a>
-<span class="sourceLineNo">466</span>            tblMap = map.get(tTable);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          }<a name="line.467"></a>
-<span class="sourceLineNo">468</span>          if (tblMap.get(region) == null) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            tblMap.put(region, new HashMap&lt;&gt;());<a name="line.469"></a>
-<span class="sourceLineNo">470</span>          }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt; famMap = tblMap.get(region);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          if (famMap.get(fam) == null) {<a name="line.472"></a>
-<span class="sourceLineNo">473</span>            famMap.put(fam, new ArrayList&lt;&gt;());<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          }<a name="line.474"></a>
-<span class="sourceLineNo">475</span>          famMap.get(fam).add(new Pair&lt;&gt;(path, raw));<a name="line.475"></a>
-<span class="sourceLineNo">476</span>          LOG.debug("found orig " + path + " for " + fam + " of table " + region);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        }<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      }<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    return new Pair&lt;&gt;(map, rows);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /*<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * @param sTableList List of tables<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param maps array of Map of family to List of Paths<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param backupId the backup Id<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   */<a name="line.487"></a>
-<span class="sourceLineNo">488</span>  public void writeBulkLoadedFiles(List&lt;TableName&gt; sTableList, Map&lt;byte[], List&lt;Path&gt;&gt;[] maps,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      String backupId) throws IOException {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      long ts = EnvironmentEdgeManager.currentTime();<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      int cnt = 0;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      List&lt;Put&gt; puts = new ArrayList&lt;&gt;();<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      for (int idx = 0; idx &lt; maps.length; idx++) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>        Map&lt;byte[], List&lt;Path&gt;&gt; map = maps[idx];<a name="line.495"></a>
-<span class="sourceLineNo">496</span>        TableName tn = sTableList.get(idx);<a name="line.496"></a>
-<span class="sourceLineNo">497</span><a name="line.497"></a>
-<span class="sourceLineNo">498</span>        if (map == null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          continue;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        }<a name="line.500"></a>
-<span class="sourceLineNo">501</span><a name="line.501"></a>
-<span class="sourceLineNo">502</span>        for (Map.Entry&lt;byte[], List&lt;Path&gt;&gt; entry : map.entrySet()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          byte[] fam = entry.getKey();<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          List&lt;Path&gt; paths = entry.getValue();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          for (Path p : paths) {<a name="line.505"></a>
-<span class="sourceLineNo">506</span>            Put put = BackupSystemTable.createPutForBulkLoadedFile(tn, fam, p.toString(), backupId,<a name="line.506"></a>
-<span class="sourceLineNo">507</span>              ts, cnt++);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>            puts.add(put);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          }<a name="line.509"></a>
-<span class="sourceLineNo">510</span>        }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>      }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      if (!puts.isEmpty()) {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        table.put(puts);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>  }<a name="line.516"></a>
-<span class="sourceLineNo">517</span><a name="line.517"></a>
-<span class="sourceLineNo">518</span>  /**<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * Reads backup status object (instance of backup info) from backup system table table<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @param backupId backup id<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @return Current status of backup session or null<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  public BackupInfo readBackupInfo(String backupId) throws IOException {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    if (LOG.isTraceEnabled()) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>      LOG.trace("read backup status from backup system table for: " + backupId);<a name="line.525"></a>
-<span class="sourceLineNo">526</span>    }<a name="line.526"></a>
-<span class="sourceLineNo">527</span><a name="line.527"></a>
-<span class="sourceLineNo">528</span>    try (Table table = connection.getTable(tableName)) {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      Get get = createGetForBackupInfo(backupId);<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      Result res = table.get(get);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      if (res.isEmpty()) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        return null;<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      return resultToBackupInfo(res);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  /**<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * Read the last backup start code (timestamp) of last successful backup. Will return null if<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * there is no start code stored on hbase or the value is of length 0. These two cases indicate<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * there is no successful backup completed so far.<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @param backupRoot directory path to backup destination<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   * @return the timestamp of last successful backup<a name="line.543"></a>
-<span class="sourceLineNo">544</span>   * @throws IOException exception<a name="line.544"></a>
-<span class="sourceLineNo">545</span>   */<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  public String readBackupStartCode(String backupRoot) throws IOException {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    LOG.trace("read backup start code from backup system table");<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>    try (Table table = connection.getTable(tableName)) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      Get get = createGetForStartCode(backupRoot);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      Result res = table.get(get);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      if (res.isEmpty()) {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>        return null;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      Cell cell = res.listCells().get(0);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      byte[] val = CellUtil.cloneValue(cell);<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      if (val.length == 0) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>        return null;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      return new String(val);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Write the start code (timestamp) to backup system table. If passed in null, then write 0 byte.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param startCode start code<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param backupRoot root directory path to backup<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @throws IOException exception<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   */<a name="line.569"></a>
-<span class="sourceLineNo">570</span>  public void writeBackupStartCode(Long startCode, String backupRoot) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    if (LOG.isTraceEnabled()) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      LOG.trace("write backup start code to backup system table " + startCode);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    }<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    try (Table table = connection.getTable(tableName)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      Put put = createPutForStartCode(startCode.toString(), backupRoot);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      table.put(put);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    }<a name="line.577"></a>
-<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   * Exclusive operations are: create, delete, merge<a name="line.581"></a>
-<span class="sourceLineNo">582</span>   * @throws IOException if a table operation fails or an active backup exclusive operation is<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   *           already underway<a name="line.583"></a>
-<span class="sourceLineNo">584</span>   */<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  public void startBackupExclusiveOperation() throws IOException {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>    LOG.debug("Start new backup exclusive operation");<a name="line.586"></a>
-<span class="sourceLineNo">587</span><a name="line.587"></a>
-<span class="sourceLineNo">588</span>    try (Table table = connection.getTable(tableName)) {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      Put put = createPutForStartBackupSession();<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // First try to put if row does not exist<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      if (!table.checkAndMutate(ACTIVE_SESSION_ROW, SESSIONS_FAMILY).qualifier(ACTIVE_SESSION_COL)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          .ifNotExists().thenPut(put)) {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>        // Row exists, try to put if value == ACTIVE_SESSION_NO<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        if (!table.checkAndMutate(ACTIVE_SESSION_ROW, SESSIONS_FAMILY).qualifier(ACTIVE_SESSION_COL)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>            .ifEquals(ACTIVE_SESSION_NO).thenPut(put)) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          throw new ExclusiveOperationException();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>        }<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      }<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span>  }<a name="line.600"></a>
-<span class="sourceLineNo">601</span><a name="line.601"></a>
-<span class="sourceLineNo">602</span>  private Put createPutForStartBackupSession() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    Put put = new Put(ACTIVE_SESSION_ROW);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    put.addColumn(SESSIONS_FAMILY, ACTIVE_SESSION_COL, ACTIVE_SESSION_YES);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    return put;<a name="line.605"></a>
-<span class="sourceLineNo">606</span>  }<a name="line.606"></a>
-<span class="sourceLineNo">607</span><a name="line.607"></a>
-<span class="sourceLineNo">608</span>  public void finishBackupExclusiveOperation() throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    LOG.debug("Finish backup exclusive operation");<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    try (Table table = connection.getTable(tableName)) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Put put = createPutForStopBackupSession();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      if (!table.checkAndMutate(ACTIVE_SESSION_ROW, SESSIONS_FAMILY).qualifier(ACTIVE_SESSION_COL)<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          .ifEquals(ACTIVE_SESSION_YES).thenPut(put)) {<a name="line.614"></a>
-<span class="sourceLineNo">615</span>        throw new IOException("There is no active backup exclusive operation");<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>  }<a name="line.618"></a>
-<span class="sourceLineNo">619</span><a name="line.619"></a>
-<span class="sourceLineNo">620</span>  private Put createPutForStopBackupSession() {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    Put put = new Put(ACTIVE_SESSION_ROW);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    put.addColumn(SESSIONS_FAMILY, ACTIVE_SESSION_COL, ACTIVE_SESSION_NO);<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    return put;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
-<span class="sourceLineNo">627</span>   * Get the Region Servers log information after the last log roll from backup system table.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>   * @param backupRoot root directory path to backup<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * @return RS log info<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @throws IOException exception<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   */<a name="line.631"></a>
-<span class="sourceLineNo">632</span>  public HashMap&lt;String, Long&gt; readRegionServerLastLogRollResult(String backupRoot)<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    LOG.trace("read region server last roll log result to backup system table");<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    Scan scan = createScanForReadRegionServerLastLogRollResult(backupRoot);<a name="line.636"></a>
-<span class="sourceLineNo">637</span><a name="line.637"></a>
-<span class="sourceLineNo">638</span>    try (Table table = connection.getTable(tableName);<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      Result res;<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      HashMap&lt;String, Long&gt; rsTimestampMap = new HashMap&lt;&gt;();<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      while ((res = scanner.next()) != null) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        res.advance();<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Cell cell = res.current();<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        byte[] row = CellUtil.cloneRow(cell);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        String server = getServerNameForReadRegionServerLastLogRollResult(row);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        byte[] data = CellUtil.cloneValue(cell);<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        rsTimestampMap.put(server, Bytes.toLong(data));<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      }<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      return rsTimestampMap;<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
-<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
-<span class="sourceLineNo">653</span><a name="line.653"></a>
-<span class="sourceLineNo">654</span>  /**<a name="line.654"></a>
-<span class="sourceLineNo">655</span>   * Writes Region Server last roll log result (timestamp) to backup system table table<a name="line.655"></a>
-<span class="sourceLineNo">656</span>   * @param server Region Server name<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * @param ts last log timestamp<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * @param backupRoot root directory path to backup<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * @throws IOException exception<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  public void writeRegionServerLastLogRollResult(String server, Long ts, String backupRoot)<a name="line.661"></a>
-<span class="sourceLineNo">662</span>      throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    LOG.trace("write region server last roll log result to backup system table");<a name="line.663"></a>
-<span class="sourceLineNo">664</span><a name="line.664"></a>
-<span class="sourceLineNo">665</span>    try (Table table = connection.getTable(tableName)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      Put put = createPutForRegionServerLastLogRollResult(server, ts, backupRoot);<a name="line.666"></a>
-<span class="sourceLineNo">667</span>      table.put(put);<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
-<span class="sourceLineNo">672</span>   * Get all completed backup information (in desc order by time)<a name="line.672"></a>
-<span class="sourceLineNo">673</span>   * @param onlyCompleted true, if only successfully completed sessions<a name="line.673"></a>
-<span class="sourceLineNo">674</span>   * @return history info of BackupCompleteData<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * @throws IOException exception<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  public ArrayList&lt;BackupInfo&gt; getBackupHistory(boolean onlyCompleted) throws IOException {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    LOG.trace("get backup history from backup system table");<a name="line.678"></a>
-<span class="sourceLineNo">679</span><a name="line.679"></a>
-<span class="sourceLineNo">680</span>    BackupState state = onlyCompleted ? BackupState.COMPLETE : BackupState.ANY;<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    ArrayList&lt;BackupInfo&gt; list = getBackupInfos(state);<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    return BackupUtils.sortHistoryListDesc(list);<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  }<a name="line.683"></a>
-<span class="sourceLineNo">684</span><a name="line.684"></a>
-<span class="sourceLineNo">685</span>  /**<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * Get all backups history<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @return list of backup info<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException if getting the backup history fails<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public List&lt;BackupInfo&gt; getBackupHistory() throws IOException {<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    return getBackupHistory(false);<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  /**<a name="line.694"></a>
-<span class="sourceLineNo">695</span>   * Get first n backup history records<a name="line.695"></a>
-<span class="sourceLineNo">696</span>   * @param n number of records, if n== -1 - max number is ignored<a name="line.696"></a>
-<span class="sourceLineNo">697</span>   * @return list of records<a name="line.697"></a>
-<span class="sourceLineNo">698</span>   * @throws IOException if getting the backup history fails<a name="line.698"></a>
-<span class="sourceLineNo">699</span>   */<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  public List&lt;BackupInfo&gt; getHistory(int n) throws IOException {<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    List&lt;BackupInfo&gt; history = getBackupHistory();<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    if (n == -1 || history.size() &lt;= n) {<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      return history;<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    return Collections.unmodifiableList(history.subList(0, n));<a name="line.705"></a>
-<span class="sourceLineNo">706</span>  }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>  /**<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   * Get backup history records filtered by list of filters.<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param n max number of records, if n == -1 , then max number is ignored<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param filters list of filters<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return backup records<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException if getting the backup history fails<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  public List&lt;BackupInfo&gt; getBackupHistory(int n, BackupInfo.Filter... filters) throws IOException {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    if (filters.length == 0) {<a name="line.716"></a>
-<span class="sourceLineNo">717</span>      return getHistory(n);<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    }<a name="line.718"></a>
-<span class="sourceLineNo">719</span><a name="line.719"></a>
-<span class="sourceLineNo">720</span>    List&lt;BackupInfo&gt; history = getBackupHistory();<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    List&lt;BackupInfo&gt; result = new ArrayList&lt;&gt;();<a name="line.721"></a>
-<span class="sourceLineNo">722</span>    for (BackupInfo bi : history) {<a name="line.722"></a>
-<span class="sourceLineNo">723</span>      if (n &gt;= 0 &amp;&amp; result.size() == n) {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>        break;<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>      boolean passed = true;<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      for (int i = 0; i &lt; filters.length; i++) {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        if (!filters[i].apply(bi)) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>          passed = false;<a name="line.730"></a>
-<span class="sourceLineNo">731</span>          break;<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        }<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      }<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      if (passed) {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        result.add(bi);<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      }<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span>    return result;<a name="line.738"></a>
-<span class="sourceLineNo">739</span>  }<a name="line.739"></a>
-<span class="sourceLineNo">740</span><a name="line.740"></a>
-<span class="sourceLineNo">741</span>  /*<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   * Retrieve TableName's for completed backup of given type<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * @param type backup type<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   * @return List of table names<a name="line.744"></a>
-<span class="sourceLineNo">745</span>   */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>  public List&lt;TableName&gt; getTablesForBackupType(BackupType type) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    Set&lt;TableName&gt; names = new HashSet&lt;&gt;();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    List&lt;BackupInfo&gt; infos = getBackupHistory(true);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    for (BackupInfo info : infos) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      if (info.getType() == type) {<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        names.addAll(info.getTableNames());<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      }<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    return new ArrayList&lt;&gt;(names);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>  }<a name="line.755"></a>
-<span class="sourceLineNo">756</span><a name="line.756"></a>
-<span class="sourceLineNo">757</span>  /**<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * Get history for backup destination<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   * @param backupRoot backup destination path<a name="line.759"></a>
-<span class="sourceLineNo">760</span>   * @return List of backup info<a name="line.760"></a>
-<span class="sourceLineNo">761</span>   * @throws IOException if getting the backup history fails<a name="line.761"></a>
-<span class="sourceLineNo">762</span>   */<a name="line.762"></a>
-<span class="sourceLineNo">763</span>  public List&lt;BackupInfo&gt; getBackupHistory(String backupRoot) throws IOException {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    ArrayList&lt;BackupInfo&gt; history = getBackupHistory(false);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    for (Iterator&lt;BackupInfo&gt; iterator = history.iterator(); iterator.hasNext();) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      BackupInfo info = iterator.next();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      if (!backupRoot.equals(info.getBackupRootDir())) {<a name="line.767"></a>
-<span class="sourceLineNo">768</span>        iterator.remove();<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      }<a name="line.769"></a>
-<span class="sourceLineNo">770</span>    }<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    return history;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>  }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>  /**<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * Get history for a table<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * @param name table name<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   * @return history for a table<a name="line.777"></a>
-<span class="sourceLineNo">778</span>   * @throws IOException if getting the backup history fails<a name="line.778"></a>
-<span class="sourceLineNo">779</span>   */<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  public List&lt;BackupInfo&gt; getBackupHistoryForTable(TableName name) throws IOException {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    List&lt;BackupInfo&gt; history = getBackupHistory();<a name="line.781"></a>
-<span class="sourceLineNo">782</span>    List&lt;BackupInfo&gt; tableHistory = new ArrayList&lt;&gt;();<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    for (BackupInfo info : history) {<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      List&lt;TableName&gt; tables = info.getTableNames();<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      if (tables.contains(name)) {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        tableHistory.add(info);<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    return tableHistory;<a name="line.789"></a>
-<span class="sourceLineNo">790</span>  }<a name="line.790"></a>
-<span class="sourceLineNo">791</span><a name="line.791"></a>
-<span class="sourceLineNo">792</span>  public Map&lt;TableName, ArrayList&lt;BackupInfo&gt;&gt; getBackupHistoryForTableSet(Set&lt;TableName&gt; set,<a name="line.792"></a>
-<span class="sourceLineNo">793</span>      String backupRoot) throws IOException {<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    List&lt;BackupInfo&gt; history = getBackupHistory(backupRoot);<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    Map&lt;TableName, ArrayList&lt;BackupInfo&gt;&gt; tableHistoryMap = new HashMap&lt;&gt;();<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    for (Iterator&lt;BackupInfo&gt; iterator = history.iterator(); iterator.hasNext();) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      BackupInfo info = iterator.next();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      if (!backupRoot.equals(info.getBackupRootDir())) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>        continue;<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      List&lt;TableName&gt; tables = info.getTableNames();<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      for (TableName tableName : tables) {<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        if (set.contains(tableName)) {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>          ArrayList&lt;BackupInfo&gt; list = tableHistoryMap.get(tableName);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>          if (list == null) {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>            list = new ArrayList&lt;&gt;();<a name="line.806"></a>
-<span class="sourceLineNo">807</span>            tableHistoryMap.put(tableName, list);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>          }<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          list.add(info);<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        }<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    return tableHistoryMap;<a name="line.813"></a>
-<span class="sourceLineNo">814</span>  }<a name="line.814"></a>
-<span class="sourceLineNo">815</span><a name="line.815"></a>
-<span class="sourceLineNo">816</span>  /**<a name="line.816"></a>
-<span class="sourceLineNo">817</span>   * Get all backup sessions with a given state (in descending order by time)<a name="line.817"></a>
-<span class="sourceLineNo">818</span>   * @param state backup session state<a name="line.818"></a>
-<span class="sourceLineNo">819</span>   * @return history info of backup info objects<a name="line.819"></a>
-<span class="sourceLineNo">820</span>   * @throws IOException exception<a name="line.820"></a>
-<span class="sourceLineNo">821</span>   */<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  public ArrayList&lt;BackupInfo&gt; getBackupInfos(BackupState state) throws IOException {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>    LOG.trace("get backup infos from backup system table");<a name="line.823"></a>
-<span class="sourceLineNo">824</span><a name="line.824"></a>
-<span class="sourceLineNo">825</span>    Scan scan = createScanForBackupHistory();<a name="line.825"></a>
-<span class="sourceLineNo">826</span>    ArrayList&lt;BackupInfo&gt; list = new ArrayList&lt;&gt;();<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>    try (Table table = connection.getTable(tableName);<a name="line.828"></a>
-<span class="sourceLineNo">829</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      Result res;<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      while ((res = scanner.next()) != null) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>        res.advance();<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        BackupInfo context = cellToBackupInfo(res.current());<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        if (state != BackupState.ANY &amp;&amp; context.getState() != state) {<a name="line.834"></a>
-<span class="sourceLineNo">835</span>          continue;<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        }<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        list.add(context);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      }<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      return list;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
-<span class="sourceLineNo">844</span>   * Write the current timestamps for each regionserver to backup system table after a successful<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * full or incremental backup. The saved timestamp is of the last log file that was backed up<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * already.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   * @param tables tables<a name="line.847"></a>
-<span class="sourceLineNo">848</span>   * @param newTimestamps timestamps<a name="line.848"></a>
-<span class="sourceLineNo">849</span>   * @param backupRoot root directory path to backup<a name="line.849"></a>
-<span class="sourceLineNo">850</span>   * @throws IOException exception<a name="line.850"></a>
-<span class="sourceLineNo">851</span>   */<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  public void writeRegionServerLogTimestamp(Set&lt;TableName&gt; tables,<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      HashMap&lt;String, Long&gt; newTimestamps, String backupRoot) throws IOException {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    if (LOG.isTraceEnabled()) {<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      LOG.trace("write RS log time stamps to backup system table for tables ["<a name="line.855"></a>
-<span class="sourceLineNo">856</span>          + StringUtils.join(tables, ",") + "]");<a name="line.856"></a>
-<span class="sourceLineNo">857</span>    }<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    List&lt;Put&gt; puts = new ArrayList&lt;&gt;();<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    for (TableName table : tables) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      byte[] smapData = toTableServerTimestampProto(table, newTimestamps).toByteArray();<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      Put put = createPutForWriteRegionServerLogTimestamp(table, smapData, backupRoot);<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      puts.add(put);<a name="line.862"></a>
-<span class="sourceLineNo">863</span>    }<a name="line.863"></a>
-<span class="sourceLineNo">864</span>    try (Table table = connection.getTable(tableName)) {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      table.put(puts);<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    }<a name="line.866"></a>
-<span class="sourceLineNo">867</span>  }<a name="line.867"></a>
-<span class="sourceLineNo">868</span><a name="line.868"></a>
-<span class="sourceLineNo">869</span>  /**<a name="line.869"></a>
-<span class="sourceLineNo">870</span>   * Read the timestamp for each region server log after the last successful backup. Each table has<a name="line.870"></a>
-<span class="sourceLineNo">871</span>   * its own set of the timestamps. The info is stored for each table as a concatenated string of<a name="line.871"></a>
-<span class="sourceLineNo">872</span>   * rs-&gt;timestapmp<a name="line.872"></a>
-<span class="sourceLineNo">873</span>   * @param backupRoot root directory path to backup<a name="line.873"></a>
-<span class="sourceLineNo">874</span>   * @return the timestamp for each region server. key: tableName value:<a name="line.874"></a>
-<span class="sourceLineNo">875</span>   *         RegionServer,PreviousTimeStamp<a name="line.875"></a>
-<span class="sourceLineNo">876</span>   * @throws IOException exception<a name="line.876"></a>
-<span class="sourceLineNo">877</span>   */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>  public HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; readLogTimestampMap(String backupRoot)<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      throws IOException {<a name="line.879"></a>
-<span class="sourceLineNo">880</span>    if (LOG.isTraceEnabled()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>      LOG.trace("read RS log ts from backup system table for root=" + backupRoot);<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    }<a name="line.882"></a>
-<span class="sourceLineNo">883</span><a name="line.883"></a>
-<span class="sourceLineNo">884</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; tableTimestampMap = new HashMap&lt;&gt;();<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>    Scan scan = createScanForReadLogTimestampMap(backupRoot);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>    try (Table table = connection.getTable(tableName);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>      Result res;<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      while ((res = scanner.next()) != null) {<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        res.advance();<a name="line.891"></a>
-<span class="sourceLineNo">892</span>        Cell cell = res.current();<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        byte[] row = CellUtil.cloneRow(cell);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>        String tabName = getTableNameForReadLogTimestampMap(row);<a name="line.894"></a>
-<span class="sourceLineNo">895</span>        TableName tn = TableName.valueOf(tabName);<a name="line.895"></a>
-<span class="sourceLineNo">896</span>        byte[] data = CellUtil.cloneValue(cell);<a name="line.896"></a>
-<span class="sourceLineNo">897</span>        if (data == null) {<a name="line.897"></a>
-<span class="sourceLineNo">898</span>          throw new IOException("Data of last backup data from backup system table "<a name="line.898"></a>
-<span class="sourceLineNo">899</span>              + "is empty. Create a backup first.");<a name="line.899"></a>
-<span class="sourceLineNo">900</span>        }<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (data != null &amp;&amp; data.length &gt; 0) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          HashMap&lt;String, Long&gt; lastBackup =<a name="line.902"></a>
-<span class="sourceLineNo">903</span>              fromTableServerTimestampProto(BackupProtos.TableServerTimestamp.parseFrom(data));<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          tableTimestampMap.put(tn, lastBackup);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        }<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      }<a name="line.906"></a>
-<span class="sourceLineNo">907</span>      return tableTimestampMap;<a name="line.907"></a>
-<span class="sourceLineNo">908</span>    }<a name="line.908"></a>
-<span class="sourceLineNo">909</span>  }<a name="line.909"></a>
-<span class="sourceLineNo">910</span><a name="line.910"></a>
-<span class="sourceLineNo">911</span>  private BackupProtos.TableServerTimestamp toTableServerTimestampProto(TableName table,<a name="line.911"></a>
-<span class="sourceLineNo">912</span>      Map&lt;String, Long&gt; map) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    BackupProtos.TableServerTimestamp.Builder tstBuilder =<a name="line.913"></a>
-<span class="sourceLineNo">914</span>        BackupProtos.TableServerTimestamp.newBuilder();<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    tstBuilder<a name="line.915"></a>
-<span class="sourceLineNo">916</span>    .setTableName(org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil.toProtoTableName(table));<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>    for (Entry&lt;String, Long&gt; entry : map.entrySet()) {<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      BackupProtos.ServerTimestamp.Builder builder = BackupProtos.ServerTimestamp.newBuilder();<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      HBaseProtos.ServerName.Builder snBuilder = HBaseProtos.ServerName.newBuilder();<a name="line.920"></a>
-<span class="sourceLineNo">921</span>      ServerName sn = ServerName.parseServerName(entry.getKey());<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      snBuilder.setHostName(sn.getHostname());<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      snBuilder.setPort(sn.getPort());<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      builder.setServerName(snBuilder.build());<a name="line.924"></a>
-<span class="sourceLineNo">925</span>      builder.setTimestamp(entry.getValue());<a name="line.925"></a>
-<span class="sourceLineNo">926</span>      tstBuilder.addServerTimestamp(builder.build());<a name="line.926"></a>
-<span class="sourceLineNo">927</span>    }<a name="line.927"></a>
-<span class="sourceLineNo">928</span><a name="line.928"></a>
-<span class="sourceLineNo">929</span>    return tstBuilder.build();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>  }<a name="line.930"></a>
-<span class="sourceLineNo">931</span><a name="line.931"></a>
-<span class="sourceLineNo">932</span>  private HashMap&lt;String, Long&gt;<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    fromTableServerTimestampProto(BackupProtos.TableServerTimestamp proto) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span><a name="line.934"></a>
-<span class="sourceLineNo">935</span>    HashMap&lt;String, Long&gt; map = new HashMap&lt;&gt;();<a name="line.935"></a>
-<span class="sourceLineNo">936</span>    List&lt;BackupProtos.ServerTimestamp&gt; list = proto.getServerTimestampList();<a name="line.936"></a>
-<span class="sourceLineNo">937</span>    for (BackupProtos.ServerTimestamp st : list) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>      ServerName sn =<a name="line.938"></a>
-<span class="sourceLineNo">939</span>          org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil.toServerName(st.getServerName());<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      map.put(sn.getHostname() + ":" + sn.getPort(), st.getTimestamp());<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return map;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Return the current tables covered by incremental backup.<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * @param backupRoot root directory path to backup<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * @return set of tableNames<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * @throws IOException exception<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   */<a name="line.950"></a>
-<span class="sourceLineNo">951</span>  public Set&lt;TableName&gt; getIncrementalBackupTableSet(String backupRoot) throws IOException {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    LOG.trace("get incremental backup table set from backup system table");<a name="line.952"></a>
-<span class="sourceLineNo">953</span><a name="line.953"></a>
-<span class="sourceLineNo">954</span>    TreeSet&lt;TableName&gt; set = new TreeSet&lt;&gt;();<a name="line.954"></a>
-<span class="sourceLineNo">955</span><a name="line.955"></a>
-<span class="sourceLineNo">956</span>    try (Table table = connection.getTable(tableName)) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      Get get = createGetForIncrBackupTableSet(backupRoot);<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      Result res = table.get(get);<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      if (res.isEmpty()) {<a name="line.959"></a>
-<span class="sourceLineNo">960</span>        return set;<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      }<a name="line.961"></a>
-<span class="sourceLineNo">962</span>      List&lt;Cell&gt; cells = res.listCells();<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      for (Cell cell : cells) {<a name="line.963"></a>
-<span class="sourceLineNo">964</span>        // qualifier = table name - we use table names as qualifiers<a 

<TRUNCATED>

[12/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
index f2fd195..b293714 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
@@ -619,1696 +619,1698 @@
 <span class="sourceLineNo">611</span>    try {<a name="line.611"></a>
 <span class="sourceLineNo">612</span>      long procId =<a name="line.612"></a>
 <span class="sourceLineNo">613</span>          master.createTable(tableDescriptor, splitKeys, req.getNonceGroup(), req.getNonce());<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      return CreateTableResponse.newBuilder().setProcId(procId).build();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    } catch (IOException ioe) {<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      throw new ServiceException(ioe);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>  }<a name="line.618"></a>
-<span class="sourceLineNo">619</span><a name="line.619"></a>
-<span class="sourceLineNo">620</span>  @Override<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  public DeleteColumnResponse deleteColumn(RpcController controller,<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      DeleteColumnRequest req) throws ServiceException {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    try {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      long procId = master.deleteColumn(<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.625"></a>
-<span class="sourceLineNo">626</span>        req.getColumnName().toByteArray(),<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        req.getNonceGroup(),<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        req.getNonce());<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      if (procId == -1) {<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        return DeleteColumnResponse.newBuilder().build();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>        return DeleteColumnResponse.newBuilder().setProcId(procId).build();<a name="line.633"></a>
-<span class="sourceLineNo">634</span>      }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>    } catch (IOException ioe) {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      throw new ServiceException(ioe);<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    }<a name="line.637"></a>
-<span class="sourceLineNo">638</span>  }<a name="line.638"></a>
-<span class="sourceLineNo">639</span><a name="line.639"></a>
-<span class="sourceLineNo">640</span>  @Override<a name="line.640"></a>
-<span class="sourceLineNo">641</span>  public DeleteNamespaceResponse deleteNamespace(RpcController controller,<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      DeleteNamespaceRequest request) throws ServiceException {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>    try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      long procId = master.deleteNamespace(<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        request.getNamespaceName(),<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        request.getNonceGroup(),<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        request.getNonce());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return DeleteNamespaceResponse.newBuilder().setProcId(procId).build();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (IOException e) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      throw new ServiceException(e);<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
-<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
-<span class="sourceLineNo">653</span><a name="line.653"></a>
-<span class="sourceLineNo">654</span>  /**<a name="line.654"></a>
-<span class="sourceLineNo">655</span>   * Execute Delete Snapshot operation.<a name="line.655"></a>
-<span class="sourceLineNo">656</span>   * @return DeleteSnapshotResponse (a protobuf wrapped void) if the snapshot existed and was<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   *    deleted properly.<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * @throws ServiceException wrapping SnapshotDoesNotExistException if specified snapshot did not<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   *    exist.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>  public DeleteSnapshotResponse deleteSnapshot(RpcController controller,<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      DeleteSnapshotRequest request) throws ServiceException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    try {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>      master.checkInitialized();<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      master.snapshotManager.checkSnapshotSupport();<a name="line.666"></a>
-<span class="sourceLineNo">667</span><a name="line.667"></a>
-<span class="sourceLineNo">668</span>      LOG.info(master.getClientIdAuditPrefix() + " delete " + request.getSnapshot());<a name="line.668"></a>
-<span class="sourceLineNo">669</span>      master.snapshotManager.deleteSnapshot(request.getSnapshot());<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return DeleteSnapshotResponse.newBuilder().build();<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    } catch (IOException e) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      throw new ServiceException(e);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>  }<a name="line.674"></a>
-<span class="sourceLineNo">675</span><a name="line.675"></a>
-<span class="sourceLineNo">676</span>  @Override<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  public DeleteTableResponse deleteTable(RpcController controller,<a name="line.677"></a>
-<span class="sourceLineNo">678</span>      DeleteTableRequest request) throws ServiceException {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>    try {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      long procId = master.deleteTable(ProtobufUtil.toTableName(<a name="line.680"></a>
-<span class="sourceLineNo">681</span>          request.getTableName()), request.getNonceGroup(), request.getNonce());<a name="line.681"></a>
-<span class="sourceLineNo">682</span>      return DeleteTableResponse.newBuilder().setProcId(procId).build();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    } catch (IOException ioe) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      throw new ServiceException(ioe);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span>  }<a name="line.686"></a>
-<span class="sourceLineNo">687</span><a name="line.687"></a>
-<span class="sourceLineNo">688</span>  @Override<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  public TruncateTableResponse truncateTable(RpcController controller, TruncateTableRequest request)<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      throws ServiceException {<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    try {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      long procId = master.truncateTable(<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.693"></a>
-<span class="sourceLineNo">694</span>        request.getPreserveSplits(),<a name="line.694"></a>
-<span class="sourceLineNo">695</span>        request.getNonceGroup(),<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        request.getNonce());<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      return TruncateTableResponse.newBuilder().setProcId(procId).build();<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    } catch (IOException ioe) {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      throw new ServiceException(ioe);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    }<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>  public DisableTableResponse disableTable(RpcController controller,<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      DisableTableRequest request) throws ServiceException {<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    try {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>      long procId = master.disableTable(<a name="line.707"></a>
-<span class="sourceLineNo">708</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        request.getNonceGroup(),<a name="line.709"></a>
-<span class="sourceLineNo">710</span>        request.getNonce());<a name="line.710"></a>
-<span class="sourceLineNo">711</span>      return DisableTableResponse.newBuilder().setProcId(procId).build();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    } catch (IOException ioe) {<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      throw new ServiceException(ioe);<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    }<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>  @Override<a name="line.717"></a>
-<span class="sourceLineNo">718</span>  public EnableCatalogJanitorResponse enableCatalogJanitor(RpcController c,<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      EnableCatalogJanitorRequest req) throws ServiceException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    rpcPreCheck("enableCatalogJanitor");<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    return EnableCatalogJanitorResponse.newBuilder().setPrevValue(<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      master.catalogJanitorChore.setEnabled(req.getEnable())).build();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>  }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>  @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  public SetCleanerChoreRunningResponse setCleanerChoreRunning(<a name="line.726"></a>
-<span class="sourceLineNo">727</span>    RpcController c, SetCleanerChoreRunningRequest req) throws ServiceException {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>    rpcPreCheck("setCleanerChoreRunning");<a name="line.728"></a>
-<span class="sourceLineNo">729</span><a name="line.729"></a>
-<span class="sourceLineNo">730</span>    boolean prevValue =<a name="line.730"></a>
-<span class="sourceLineNo">731</span>      master.getLogCleaner().getEnabled() &amp;&amp; master.getHFileCleaner().getEnabled();<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    master.getLogCleaner().setEnabled(req.getOn());<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    master.getHFileCleaner().setEnabled(req.getOn());<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    return SetCleanerChoreRunningResponse.newBuilder().setPrevValue(prevValue).build();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>  }<a name="line.735"></a>
-<span class="sourceLineNo">736</span><a name="line.736"></a>
-<span class="sourceLineNo">737</span>  @Override<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  public EnableTableResponse enableTable(RpcController controller,<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      EnableTableRequest request) throws ServiceException {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    try {<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      long procId = master.enableTable(<a name="line.741"></a>
-<span class="sourceLineNo">742</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.742"></a>
-<span class="sourceLineNo">743</span>        request.getNonceGroup(),<a name="line.743"></a>
-<span class="sourceLineNo">744</span>        request.getNonce());<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return EnableTableResponse.newBuilder().setProcId(procId).build();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    } catch (IOException ioe) {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      throw new ServiceException(ioe);<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    }<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  @Override<a name="line.751"></a>
-<span class="sourceLineNo">752</span>  public MergeTableRegionsResponse mergeTableRegions(<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      RpcController c, MergeTableRegionsRequest request) throws ServiceException {<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    try {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      master.checkInitialized();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    } catch (IOException ioe) {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      throw new ServiceException(ioe);<a name="line.757"></a>
-<span class="sourceLineNo">758</span>    }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>    RegionStates regionStates = master.getAssignmentManager().getRegionStates();<a name="line.760"></a>
+<span class="sourceLineNo">614</span>      LOG.info(master.getClientIdAuditPrefix() + " procedure request for creating table: " +<a name="line.614"></a>
+<span class="sourceLineNo">615</span>              req.getTableSchema().getTableName() + " procId is: " + procId);<a name="line.615"></a>
+<span class="sourceLineNo">616</span>      return CreateTableResponse.newBuilder().setProcId(procId).build();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>    } catch (IOException ioe) {<a name="line.617"></a>
+<span class="sourceLineNo">618</span>      throw new ServiceException(ioe);<a name="line.618"></a>
+<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  }<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>  @Override<a name="line.622"></a>
+<span class="sourceLineNo">623</span>  public DeleteColumnResponse deleteColumn(RpcController controller,<a name="line.623"></a>
+<span class="sourceLineNo">624</span>      DeleteColumnRequest req) throws ServiceException {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    try {<a name="line.625"></a>
+<span class="sourceLineNo">626</span>      long procId = master.deleteColumn(<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        req.getColumnName().toByteArray(),<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        req.getNonceGroup(),<a name="line.629"></a>
+<span class="sourceLineNo">630</span>        req.getNonce());<a name="line.630"></a>
+<span class="sourceLineNo">631</span>      if (procId == -1) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        return DeleteColumnResponse.newBuilder().build();<a name="line.633"></a>
+<span class="sourceLineNo">634</span>      } else {<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        return DeleteColumnResponse.newBuilder().setProcId(procId).build();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>    } catch (IOException ioe) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      throw new ServiceException(ioe);<a name="line.638"></a>
+<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>  }<a name="line.640"></a>
+<span class="sourceLineNo">641</span><a name="line.641"></a>
+<span class="sourceLineNo">642</span>  @Override<a name="line.642"></a>
+<span class="sourceLineNo">643</span>  public DeleteNamespaceResponse deleteNamespace(RpcController controller,<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      DeleteNamespaceRequest request) throws ServiceException {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>    try {<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      long procId = master.deleteNamespace(<a name="line.646"></a>
+<span class="sourceLineNo">647</span>        request.getNamespaceName(),<a name="line.647"></a>
+<span class="sourceLineNo">648</span>        request.getNonceGroup(),<a name="line.648"></a>
+<span class="sourceLineNo">649</span>        request.getNonce());<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      return DeleteNamespaceResponse.newBuilder().setProcId(procId).build();<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    } catch (IOException e) {<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      throw new ServiceException(e);<a name="line.652"></a>
+<span class="sourceLineNo">653</span>    }<a name="line.653"></a>
+<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
+<span class="sourceLineNo">655</span><a name="line.655"></a>
+<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
+<span class="sourceLineNo">657</span>   * Execute Delete Snapshot operation.<a name="line.657"></a>
+<span class="sourceLineNo">658</span>   * @return DeleteSnapshotResponse (a protobuf wrapped void) if the snapshot existed and was<a name="line.658"></a>
+<span class="sourceLineNo">659</span>   *    deleted properly.<a name="line.659"></a>
+<span class="sourceLineNo">660</span>   * @throws ServiceException wrapping SnapshotDoesNotExistException if specified snapshot did not<a name="line.660"></a>
+<span class="sourceLineNo">661</span>   *    exist.<a name="line.661"></a>
+<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  @Override<a name="line.663"></a>
+<span class="sourceLineNo">664</span>  public DeleteSnapshotResponse deleteSnapshot(RpcController controller,<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      DeleteSnapshotRequest request) throws ServiceException {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    try {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      master.checkInitialized();<a name="line.667"></a>
+<span class="sourceLineNo">668</span>      master.snapshotManager.checkSnapshotSupport();<a name="line.668"></a>
+<span class="sourceLineNo">669</span><a name="line.669"></a>
+<span class="sourceLineNo">670</span>      LOG.info(master.getClientIdAuditPrefix() + " delete " + request.getSnapshot());<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      master.snapshotManager.deleteSnapshot(request.getSnapshot());<a name="line.671"></a>
+<span class="sourceLineNo">672</span>      return DeleteSnapshotResponse.newBuilder().build();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>    } catch (IOException e) {<a name="line.673"></a>
+<span class="sourceLineNo">674</span>      throw new ServiceException(e);<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    }<a name="line.675"></a>
+<span class="sourceLineNo">676</span>  }<a name="line.676"></a>
+<span class="sourceLineNo">677</span><a name="line.677"></a>
+<span class="sourceLineNo">678</span>  @Override<a name="line.678"></a>
+<span class="sourceLineNo">679</span>  public DeleteTableResponse deleteTable(RpcController controller,<a name="line.679"></a>
+<span class="sourceLineNo">680</span>      DeleteTableRequest request) throws ServiceException {<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    try {<a name="line.681"></a>
+<span class="sourceLineNo">682</span>      long procId = master.deleteTable(ProtobufUtil.toTableName(<a name="line.682"></a>
+<span class="sourceLineNo">683</span>          request.getTableName()), request.getNonceGroup(), request.getNonce());<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      return DeleteTableResponse.newBuilder().setProcId(procId).build();<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    } catch (IOException ioe) {<a name="line.685"></a>
+<span class="sourceLineNo">686</span>      throw new ServiceException(ioe);<a name="line.686"></a>
+<span class="sourceLineNo">687</span>    }<a name="line.687"></a>
+<span class="sourceLineNo">688</span>  }<a name="line.688"></a>
+<span class="sourceLineNo">689</span><a name="line.689"></a>
+<span class="sourceLineNo">690</span>  @Override<a name="line.690"></a>
+<span class="sourceLineNo">691</span>  public TruncateTableResponse truncateTable(RpcController controller, TruncateTableRequest request)<a name="line.691"></a>
+<span class="sourceLineNo">692</span>      throws ServiceException {<a name="line.692"></a>
+<span class="sourceLineNo">693</span>    try {<a name="line.693"></a>
+<span class="sourceLineNo">694</span>      long procId = master.truncateTable(<a name="line.694"></a>
+<span class="sourceLineNo">695</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.695"></a>
+<span class="sourceLineNo">696</span>        request.getPreserveSplits(),<a name="line.696"></a>
+<span class="sourceLineNo">697</span>        request.getNonceGroup(),<a name="line.697"></a>
+<span class="sourceLineNo">698</span>        request.getNonce());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>      return TruncateTableResponse.newBuilder().setProcId(procId).build();<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    } catch (IOException ioe) {<a name="line.700"></a>
+<span class="sourceLineNo">701</span>      throw new ServiceException(ioe);<a name="line.701"></a>
+<span class="sourceLineNo">702</span>    }<a name="line.702"></a>
+<span class="sourceLineNo">703</span>  }<a name="line.703"></a>
+<span class="sourceLineNo">704</span><a name="line.704"></a>
+<span class="sourceLineNo">705</span>  @Override<a name="line.705"></a>
+<span class="sourceLineNo">706</span>  public DisableTableResponse disableTable(RpcController controller,<a name="line.706"></a>
+<span class="sourceLineNo">707</span>      DisableTableRequest request) throws ServiceException {<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    try {<a name="line.708"></a>
+<span class="sourceLineNo">709</span>      long procId = master.disableTable(<a name="line.709"></a>
+<span class="sourceLineNo">710</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.710"></a>
+<span class="sourceLineNo">711</span>        request.getNonceGroup(),<a name="line.711"></a>
+<span class="sourceLineNo">712</span>        request.getNonce());<a name="line.712"></a>
+<span class="sourceLineNo">713</span>      return DisableTableResponse.newBuilder().setProcId(procId).build();<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    } catch (IOException ioe) {<a name="line.714"></a>
+<span class="sourceLineNo">715</span>      throw new ServiceException(ioe);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
+<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
+<span class="sourceLineNo">718</span><a name="line.718"></a>
+<span class="sourceLineNo">719</span>  @Override<a name="line.719"></a>
+<span class="sourceLineNo">720</span>  public EnableCatalogJanitorResponse enableCatalogJanitor(RpcController c,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      EnableCatalogJanitorRequest req) throws ServiceException {<a name="line.721"></a>
+<span class="sourceLineNo">722</span>    rpcPreCheck("enableCatalogJanitor");<a name="line.722"></a>
+<span class="sourceLineNo">723</span>    return EnableCatalogJanitorResponse.newBuilder().setPrevValue(<a name="line.723"></a>
+<span class="sourceLineNo">724</span>      master.catalogJanitorChore.setEnabled(req.getEnable())).build();<a name="line.724"></a>
+<span class="sourceLineNo">725</span>  }<a name="line.725"></a>
+<span class="sourceLineNo">726</span><a name="line.726"></a>
+<span class="sourceLineNo">727</span>  @Override<a name="line.727"></a>
+<span class="sourceLineNo">728</span>  public SetCleanerChoreRunningResponse setCleanerChoreRunning(<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    RpcController c, SetCleanerChoreRunningRequest req) throws ServiceException {<a name="line.729"></a>
+<span class="sourceLineNo">730</span>    rpcPreCheck("setCleanerChoreRunning");<a name="line.730"></a>
+<span class="sourceLineNo">731</span><a name="line.731"></a>
+<span class="sourceLineNo">732</span>    boolean prevValue =<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      master.getLogCleaner().getEnabled() &amp;&amp; master.getHFileCleaner().getEnabled();<a name="line.733"></a>
+<span class="sourceLineNo">734</span>    master.getLogCleaner().setEnabled(req.getOn());<a name="line.734"></a>
+<span class="sourceLineNo">735</span>    master.getHFileCleaner().setEnabled(req.getOn());<a name="line.735"></a>
+<span class="sourceLineNo">736</span>    return SetCleanerChoreRunningResponse.newBuilder().setPrevValue(prevValue).build();<a name="line.736"></a>
+<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
+<span class="sourceLineNo">738</span><a name="line.738"></a>
+<span class="sourceLineNo">739</span>  @Override<a name="line.739"></a>
+<span class="sourceLineNo">740</span>  public EnableTableResponse enableTable(RpcController controller,<a name="line.740"></a>
+<span class="sourceLineNo">741</span>      EnableTableRequest request) throws ServiceException {<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    try {<a name="line.742"></a>
+<span class="sourceLineNo">743</span>      long procId = master.enableTable(<a name="line.743"></a>
+<span class="sourceLineNo">744</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.744"></a>
+<span class="sourceLineNo">745</span>        request.getNonceGroup(),<a name="line.745"></a>
+<span class="sourceLineNo">746</span>        request.getNonce());<a name="line.746"></a>
+<span class="sourceLineNo">747</span>      return EnableTableResponse.newBuilder().setProcId(procId).build();<a name="line.747"></a>
+<span class="sourceLineNo">748</span>    } catch (IOException ioe) {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>      throw new ServiceException(ioe);<a name="line.749"></a>
+<span class="sourceLineNo">750</span>    }<a name="line.750"></a>
+<span class="sourceLineNo">751</span>  }<a name="line.751"></a>
+<span class="sourceLineNo">752</span><a name="line.752"></a>
+<span class="sourceLineNo">753</span>  @Override<a name="line.753"></a>
+<span class="sourceLineNo">754</span>  public MergeTableRegionsResponse mergeTableRegions(<a name="line.754"></a>
+<span class="sourceLineNo">755</span>      RpcController c, MergeTableRegionsRequest request) throws ServiceException {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>    try {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>      master.checkInitialized();<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    } catch (IOException ioe) {<a name="line.758"></a>
+<span class="sourceLineNo">759</span>      throw new ServiceException(ioe);<a name="line.759"></a>
+<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
 <span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    assert(request.getRegionCount() == 2);<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    RegionInfo[] regionsToMerge = new RegionInfo[request.getRegionCount()];<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    for (int i = 0; i &lt; request.getRegionCount(); i++) {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      final byte[] encodedNameOfRegion = request.getRegion(i).getValue().toByteArray();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      if (request.getRegion(i).getType() != RegionSpecifierType.ENCODED_REGION_NAME) {<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        LOG.warn("MergeRegions specifier type: expected: "<a name="line.767"></a>
-<span class="sourceLineNo">768</span>          + RegionSpecifierType.ENCODED_REGION_NAME + " actual: region " + i + " ="<a name="line.768"></a>
-<span class="sourceLineNo">769</span>          + request.getRegion(i).getType());<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      }<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      RegionState regionState = regionStates.getRegionState(Bytes.toString(encodedNameOfRegion));<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (regionState == null) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        throw new ServiceException(<a name="line.773"></a>
-<span class="sourceLineNo">774</span>          new UnknownRegionException(Bytes.toStringBinary(encodedNameOfRegion)));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      regionsToMerge[i] = regionState.getRegion();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span><a name="line.778"></a>
-<span class="sourceLineNo">779</span>    try {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      long procId = master.mergeRegions(<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        regionsToMerge,<a name="line.781"></a>
-<span class="sourceLineNo">782</span>        request.getForcible(),<a name="line.782"></a>
-<span class="sourceLineNo">783</span>        request.getNonceGroup(),<a name="line.783"></a>
-<span class="sourceLineNo">784</span>        request.getNonce());<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return MergeTableRegionsResponse.newBuilder().setProcId(procId).build();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    } catch (IOException ioe) {<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      throw new ServiceException(ioe);<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span>  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span><a name="line.790"></a>
-<span class="sourceLineNo">791</span>  @Override<a name="line.791"></a>
-<span class="sourceLineNo">792</span>  public SplitTableRegionResponse splitRegion(final RpcController controller,<a name="line.792"></a>
-<span class="sourceLineNo">793</span>      final SplitTableRegionRequest request) throws ServiceException {<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    try {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      long procId = master.splitRegion(<a name="line.795"></a>
-<span class="sourceLineNo">796</span>        ProtobufUtil.toRegionInfo(request.getRegionInfo()),<a name="line.796"></a>
-<span class="sourceLineNo">797</span>        request.hasSplitRow() ? request.getSplitRow().toByteArray() : null,<a name="line.797"></a>
-<span class="sourceLineNo">798</span>        request.getNonceGroup(),<a name="line.798"></a>
-<span class="sourceLineNo">799</span>        request.getNonce());<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return SplitTableRegionResponse.newBuilder().setProcId(procId).build();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    } catch (IOException ie) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      throw new ServiceException(ie);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    }<a name="line.803"></a>
-<span class="sourceLineNo">804</span>  }<a name="line.804"></a>
-<span class="sourceLineNo">805</span><a name="line.805"></a>
-<span class="sourceLineNo">806</span>  @Override<a name="line.806"></a>
-<span class="sourceLineNo">807</span>  public ClientProtos.CoprocessorServiceResponse execMasterService(final RpcController controller,<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      final ClientProtos.CoprocessorServiceRequest request) throws ServiceException {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>    rpcPreCheck("execMasterService");<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    try {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      ServerRpcController execController = new ServerRpcController();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      ClientProtos.CoprocessorServiceCall call = request.getCall();<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      String serviceName = call.getServiceName();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      String methodName = call.getMethodName();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      if (!master.coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        throw new UnknownProtocolException(null,<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          "No registered Master Coprocessor Endpoint found for " + serviceName +<a name="line.817"></a>
-<span class="sourceLineNo">818</span>          ". Has it been enabled?");<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      }<a name="line.819"></a>
-<span class="sourceLineNo">820</span><a name="line.820"></a>
-<span class="sourceLineNo">821</span>      com.google.protobuf.Service service = master.coprocessorServiceHandlers.get(serviceName);<a name="line.821"></a>
-<span class="sourceLineNo">822</span>      com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      com.google.protobuf.Descriptors.MethodDescriptor methodDesc =<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          CoprocessorRpcUtils.getMethodDescriptor(methodName, serviceDesc);<a name="line.824"></a>
-<span class="sourceLineNo">825</span><a name="line.825"></a>
-<span class="sourceLineNo">826</span>      com.google.protobuf.Message execRequest =<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          CoprocessorRpcUtils.getRequest(service, methodDesc, call.getRequest());<a name="line.827"></a>
-<span class="sourceLineNo">828</span>      final com.google.protobuf.Message.Builder responseBuilder =<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          service.getResponsePrototype(methodDesc).newBuilderForType();<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      service.callMethod(methodDesc, execController, execRequest,<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        (message) -&gt; {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          if (message != null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>            responseBuilder.mergeFrom(message);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>          }<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        });<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      com.google.protobuf.Message execResult = responseBuilder.build();<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      if (execController.getFailedOn() != null) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>        throw execController.getFailedOn();<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      }<a name="line.839"></a>
-<span class="sourceLineNo">840</span>      return CoprocessorRpcUtils.getResponse(execResult, HConstants.EMPTY_BYTE_ARRAY);<a name="line.840"></a>
-<span class="sourceLineNo">841</span>    } catch (IOException ie) {<a name="line.841"></a>
-<span class="sourceLineNo">842</span>      throw new ServiceException(ie);<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    }<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  }<a name="line.844"></a>
-<span class="sourceLineNo">845</span><a name="line.845"></a>
-<span class="sourceLineNo">846</span>  /**<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   * Triggers an asynchronous attempt to run a distributed procedure.<a name="line.847"></a>
-<span class="sourceLineNo">848</span>   * {@inheritDoc}<a name="line.848"></a>
-<span class="sourceLineNo">849</span>   */<a name="line.849"></a>
-<span class="sourceLineNo">850</span>  @Override<a name="line.850"></a>
-<span class="sourceLineNo">851</span>  public ExecProcedureResponse execProcedure(RpcController controller,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      ExecProcedureRequest request) throws ServiceException {<a name="line.852"></a>
-<span class="sourceLineNo">853</span>    try {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      master.checkInitialized();<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      ProcedureDescription desc = request.getProcedure();<a name="line.855"></a>
-<span class="sourceLineNo">856</span>      MasterProcedureManager mpm = master.getMasterProcedureManagerHost().getProcedureManager(<a name="line.856"></a>
-<span class="sourceLineNo">857</span>        desc.getSignature());<a name="line.857"></a>
-<span class="sourceLineNo">858</span>      if (mpm == null) {<a name="line.858"></a>
-<span class="sourceLineNo">859</span>        throw new ServiceException(new DoNotRetryIOException("The procedure is not registered: "<a name="line.859"></a>
-<span class="sourceLineNo">860</span>          + desc.getSignature()));<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      }<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      LOG.info(master.getClientIdAuditPrefix() + " procedure request for: " + desc.getSignature());<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      mpm.checkPermissions(desc, accessChecker, RpcServer.getRequestUser().orElse(null));<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      mpm.execProcedure(desc);<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      // send back the max amount of time the client should wait for the procedure<a name="line.865"></a>
-<span class="sourceLineNo">866</span>      // to complete<a name="line.866"></a>
-<span class="sourceLineNo">867</span>      long waitTime = SnapshotDescriptionUtils.DEFAULT_MAX_WAIT_TIME;<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      return ExecProcedureResponse.newBuilder().setExpectedTimeout(<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        waitTime).build();<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    } catch (ForeignException e) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      throw new ServiceException(e.getCause());<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    } catch (IOException e) {<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      throw new ServiceException(e);<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>  }<a name="line.875"></a>
-<span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>  /**<a name="line.877"></a>
-<span class="sourceLineNo">878</span>   * Triggers a synchronous attempt to run a distributed procedure and sets<a name="line.878"></a>
-<span class="sourceLineNo">879</span>   * return data in response.<a name="line.879"></a>
-<span class="sourceLineNo">880</span>   * {@inheritDoc}<a name="line.880"></a>
-<span class="sourceLineNo">881</span>   */<a name="line.881"></a>
-<span class="sourceLineNo">882</span>  @Override<a name="line.882"></a>
-<span class="sourceLineNo">883</span>  public ExecProcedureResponse execProcedureWithRet(RpcController controller,<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      ExecProcedureRequest request) throws ServiceException {<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    rpcPreCheck("execProcedureWithRet");<a name="line.885"></a>
-<span class="sourceLineNo">886</span>    try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>      ProcedureDescription desc = request.getProcedure();<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      MasterProcedureManager mpm =<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        master.getMasterProcedureManagerHost().getProcedureManager(desc.getSignature());<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      if (mpm == null) {<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        throw new ServiceException("The procedure is not registered: " + desc.getSignature());<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      }<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      LOG.info(master.getClientIdAuditPrefix() + " procedure request for: " + desc.getSignature());<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      byte[] data = mpm.execProcedureWithRet(desc);<a name="line.894"></a>
-<span class="sourceLineNo">895</span>      ExecProcedureResponse.Builder builder = ExecProcedureResponse.newBuilder();<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      // set return data if available<a name="line.896"></a>
-<span class="sourceLineNo">897</span>      if (data != null) {<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        builder.setReturnData(UnsafeByteOperations.unsafeWrap(data));<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
-<span class="sourceLineNo">900</span>      return builder.build();<a name="line.900"></a>
-<span class="sourceLineNo">901</span>    } catch (IOException e) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>      throw new ServiceException(e);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    }<a name="line.903"></a>
-<span class="sourceLineNo">904</span>  }<a name="line.904"></a>
-<span class="sourceLineNo">905</span><a name="line.905"></a>
-<span class="sourceLineNo">906</span>  @Override<a name="line.906"></a>
-<span class="sourceLineNo">907</span>  public GetClusterStatusResponse getClusterStatus(RpcController controller,<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      GetClusterStatusRequest req) throws ServiceException {<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    GetClusterStatusResponse.Builder response = GetClusterStatusResponse.newBuilder();<a name="line.909"></a>
-<span class="sourceLineNo">910</span>    try {<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      master.checkInitialized();<a name="line.911"></a>
-<span class="sourceLineNo">912</span>      response.setClusterStatus(ClusterMetricsBuilder.toClusterStatus(<a name="line.912"></a>
-<span class="sourceLineNo">913</span>        master.getClusterMetrics(ClusterMetricsBuilder.toOptions(req.getOptionsList()))));<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    } catch (IOException e) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      throw new ServiceException(e);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>    }<a name="line.916"></a>
-<span class="sourceLineNo">917</span>    return response.build();<a name="line.917"></a>
-<span class="sourceLineNo">918</span>  }<a name="line.918"></a>
-<span class="sourceLineNo">919</span><a name="line.919"></a>
-<span class="sourceLineNo">920</span>  /**<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   * List the currently available/stored snapshots. Any in-progress snapshots are ignored<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   */<a name="line.922"></a>
-<span class="sourceLineNo">923</span>  @Override<a name="line.923"></a>
-<span class="sourceLineNo">924</span>  public GetCompletedSnapshotsResponse getCompletedSnapshots(RpcController controller,<a name="line.924"></a>
-<span class="sourceLineNo">925</span>      GetCompletedSnapshotsRequest request) throws ServiceException {<a name="line.925"></a>
-<span class="sourceLineNo">926</span>    try {<a name="line.926"></a>
-<span class="sourceLineNo">927</span>      master.checkInitialized();<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      GetCompletedSnapshotsResponse.Builder builder = GetCompletedSnapshotsResponse.newBuilder();<a name="line.928"></a>
-<span class="sourceLineNo">929</span>      List&lt;SnapshotDescription&gt; snapshots = master.snapshotManager.getCompletedSnapshots();<a name="line.929"></a>
-<span class="sourceLineNo">930</span><a name="line.930"></a>
-<span class="sourceLineNo">931</span>      // convert to protobuf<a name="line.931"></a>
-<span class="sourceLineNo">932</span>      for (SnapshotDescription snapshot : snapshots) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>        builder.addSnapshots(snapshot);<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      }<a name="line.934"></a>
-<span class="sourceLineNo">935</span>      return builder.build();<a name="line.935"></a>
-<span class="sourceLineNo">936</span>    } catch (IOException e) {<a name="line.936"></a>
-<span class="sourceLineNo">937</span>      throw new ServiceException(e);<a name="line.937"></a>
-<span class="sourceLineNo">938</span>    }<a name="line.938"></a>
-<span class="sourceLineNo">939</span>  }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>  @Override<a name="line.941"></a>
-<span class="sourceLineNo">942</span>  public GetNamespaceDescriptorResponse getNamespaceDescriptor(<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      RpcController controller, GetNamespaceDescriptorRequest request)<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      throws ServiceException {<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    try {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      return GetNamespaceDescriptorResponse.newBuilder()<a name="line.946"></a>
-<span class="sourceLineNo">947</span>        .setNamespaceDescriptor(ProtobufUtil.toProtoNamespaceDescriptor(<a name="line.947"></a>
-<span class="sourceLineNo">948</span>            master.getNamespace(request.getNamespaceName())))<a name="line.948"></a>
-<span class="sourceLineNo">949</span>        .build();<a name="line.949"></a>
-<span class="sourceLineNo">950</span>    } catch (IOException e) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>      throw new ServiceException(e);<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
-<span class="sourceLineNo">953</span>  }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>  /**<a name="line.955"></a>
-<span class="sourceLineNo">956</span>   * Get the number of regions of the table that have been updated by the alter.<a name="line.956"></a>
-<span class="sourceLineNo">957</span>   *<a name="line.957"></a>
-<span class="sourceLineNo">958</span>   * @return Pair indicating the number of regions updated Pair.getFirst is the<a name="line.958"></a>
-<span class="sourceLineNo">959</span>   *         regions that are yet to be updated Pair.getSecond is the total number<a name="line.959"></a>
-<span class="sourceLineNo">960</span>   *         of regions of the table<a name="line.960"></a>
-<span class="sourceLineNo">961</span>   * @throws ServiceException<a name="line.961"></a>
-<span class="sourceLineNo">962</span>   */<a name="line.962"></a>
-<span class="sourceLineNo">963</span>  @Override<a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public GetSchemaAlterStatusResponse getSchemaAlterStatus(<a name="line.964"></a>
-<span class="sourceLineNo">965</span>      RpcController controller, GetSchemaAlterStatusRequest req) throws ServiceException {<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    // TODO: currently, we query using the table name on the client side. this<a name="line.966"></a>
-<span class="sourceLineNo">967</span>    // may overlap with other table operations or the table operation may<a name="line.967"></a>
-<span class="sourceLineNo">968</span>    // have completed before querying this API. We need to refactor to a<a name="line.968"></a>
-<span class="sourceLineNo">969</span>    // transaction system in the future to avoid these ambiguities.<a name="line.969"></a>
-<span class="sourceLineNo">970</span>    TableName tableName = ProtobufUtil.toTableName(req.getTableName());<a name="line.970"></a>
-<span class="sourceLineNo">971</span><a name="line.971"></a>
-<span class="sourceLineNo">972</span>    try {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>      master.checkInitialized();<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      Pair&lt;Integer,Integer&gt; pair = master.getAssignmentManager().getReopenStatus(tableName);<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      GetSchemaAlterStatusResponse.Builder ret = GetSchemaAlterStatusResponse.newBuilder();<a name="line.975"></a>
-<span class="sourceLineNo">976</span>      ret.setYetToUpdateRegions(pair.getFirst());<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      ret.setTotalRegions(pair.getSecond());<a name="line.977"></a>
-<span class="sourceLineNo">978</span>      return ret.build();<a name="line.978"></a>
-<span class="sourceLineNo">979</span>    } catch (IOException ioe) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>      throw new ServiceException(ioe);<a name="line.980"></a>
-<span class="sourceLineNo">981</span>    }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>  }<a name="line.982"></a>
-<span class="sourceLineNo">983</span><a name="line.983"></a>
-<span class="sourceLineNo">984</span>  /**<a name="line.984"></a>
-<span class="sourceLineNo">985</span>   * Get list of TableDescriptors for requested tables.<a name="line.985"></a>
-<span class="sourceLineNo">986</span>   * @param c Unused (set to null).<a name="line.986"></a>
-<span class="sourceLineNo">987</span>   * @param req GetTableDescriptorsRequest that contains:<a name="line.987"></a>
-<span class="sourceLineNo">988</span>   * - tableNames: requested tables, or if empty, all are requested<a name="line.988"></a>
-<span class="sourceLineNo">989</span>   * @return GetTableDescriptorsResponse<a name="line.989"></a>
-<span class="sourceLineNo">990</span>   * @throws ServiceException<a name="line.990"></a>
-<span class="sourceLineNo">991</span>   */<a name="line.991"></a>
-<span class="sourceLineNo">992</span>  @Override<a name="line.992"></a>
-<span class="sourceLineNo">993</span>  public GetTableDescriptorsResponse getTableDescriptors(RpcController c,<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      GetTableDescriptorsRequest req) throws ServiceException {<a name="line.994"></a>
-<span class="sourceLineNo">995</span>    try {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>      master.checkInitialized();<a name="line.996"></a>
-<span class="sourceLineNo">997</span><a name="line.997"></a>
-<span class="sourceLineNo">998</span>      final String regex = req.hasRegex() ? req.getRegex() : null;<a name="line.998"></a>
-<span class="sourceLineNo">999</span>      final String namespace = req.hasNamespace() ? req.getNamespace() : null;<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>      List&lt;TableName&gt; tableNameList = null;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>      if (req.getTableNamesCount() &gt; 0) {<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>        tableNameList = new ArrayList&lt;TableName&gt;(req.getTableNamesCount());<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>        for (HBaseProtos.TableName tableNamePB: req.getTableNamesList()) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>          tableNameList.add(ProtobufUtil.toTableName(tableNamePB));<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>        }<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>      }<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span><a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>      List&lt;TableDescriptor&gt; descriptors = master.listTableDescriptors(namespace, regex,<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>          tableNameList, req.getIncludeSysTables());<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span><a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>      GetTableDescriptorsResponse.Builder builder = GetTableDescriptorsResponse.newBuilder();<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>      if (descriptors != null &amp;&amp; descriptors.size() &gt; 0) {<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>        // Add the table descriptors to the response<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>        for (TableDescriptor htd: descriptors) {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>          builder.addTableSchema(ProtobufUtil.toTableSchema(htd));<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>        }<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>      }<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>      return builder.build();<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>    } catch (IOException ioe) {<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>      throw new ServiceException(ioe);<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>    }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>  }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span><a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>  /**<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>   * Get list of userspace table names<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>   * @param controller Unused (set to null).<a name="line.1026"></a>
-<span class="sourceLineNo">1027</span>   * @param req GetTableNamesRequest<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>   * @return GetTableNamesResponse<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>   * @throws ServiceException<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>   */<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>  @Override<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>  public GetTableNamesResponse getTableNames(RpcController controller,<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      GetTableNamesRequest req) throws ServiceException {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>    try {<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>      master.checkServiceStarted();<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span><a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>      final String regex = req.hasRegex() ? req.getRegex() : null;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>      final String namespace = req.hasNamespace() ? req.getNamespace() : null;<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>      List&lt;TableName&gt; tableNames = master.listTableNames(namespace, regex,<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          req.getIncludeSysTables());<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span><a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      GetTableNamesResponse.Builder builder = GetTableNamesResponse.newBuilder();<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>      if (tableNames != null &amp;&amp; tableNames.size() &gt; 0) {<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>        // Add the table names to the response<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        for (TableName table: tableNames) {<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>          builder.addTableNames(ProtobufUtil.toProtoTableName(table));<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        }<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      return builder.build();<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>    } catch (IOException e) {<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>      throw new ServiceException(e);<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>    }<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span>  }<a name="line.1053"></a>
-<span class="sourceLineNo">1054</span><a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>  @Override<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>  public GetTableStateResponse getTableState(RpcController controller,<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>      GetTableStateRequest request) throws ServiceException {<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>    try {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>      master.checkServiceStarted();<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>      TableName tableName = ProtobufUtil.toTableName(request.getTableName());<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>      TableState ts = master.getTableStateManager().getTableState(tableName);<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      GetTableStateResponse.Builder builder = GetTableStateResponse.newBuilder();<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>      builder.setTableState(ts.convert());<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>      return builder.build();<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>    } catch (IOException e) {<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      throw new ServiceException(e);<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>    }<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>  }<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span><a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>  @Override<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>  public IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled(RpcController c,<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span>      IsCatalogJanitorEnabledRequest req) throws ServiceException {<a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    return IsCatalogJanitorEnabledResponse.newBuilder().setValue(<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>      master.isCatalogJanitorEnabled()).build();<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>  }<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span><a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>  @Override<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>  public IsCleanerChoreEnabledResponse isCleanerChoreEnabled(RpcController c,<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>                                                             IsCleanerChoreEnabledRequest req)<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    throws ServiceException {<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span>    return IsCleanerChoreEnabledResponse.newBuilder().setValue(master.isCleanerChoreEnabled())<a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>                                        .build();<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>  }<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span><a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  @Override<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>  public IsMasterRunningResponse isMasterRunning(RpcController c,<a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>      IsMasterRunningRequest req) throws ServiceException {<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>    try {<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>      master.checkServiceStarted();<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>      return IsMasterRunningResponse.newBuilder().setIsMasterRunning(<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>        !master.isStopped()).build();<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>    } catch (IOException e) {<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>      throw new ServiceException(e);<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>    }<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  }<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span><a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>  /**<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>   * Checks if the specified procedure is done.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span>   * @return true if the procedure is done, false if the procedure is in the process of completing<a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>   * @throws ServiceException if invalid procedure or failed procedure with progress failure reason.<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>   */<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>  @Override<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>  public IsProcedureDoneResponse isProcedureDone(RpcController controller,<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span>      IsProcedureDoneRequest request) throws ServiceException {<a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    try {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      master.checkInitialized();<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      ProcedureDescription desc = request.getProcedure();<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>      MasterProcedureManager mpm = master.getMasterProcedureManagerHost().getProcedureManager(<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span>        desc.getSignature());<a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>      if (mpm == null) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>        throw new ServiceException("The procedure is not registered: "<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>          + desc.getSignature());<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>      }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span>      LOG.debug("Checking to see if procedure from request:"<a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>        + desc.getSignature() + " is done");<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span><a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      IsProcedureDoneResponse.Builder builder =<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>        IsProcedureDoneResponse.newBuilder();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      boolean done = mpm.isProcedureDone(desc);<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      builder.setDone(done);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>      return builder.build();<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span>    } catch (ForeignException e) {<a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>      throw new ServiceException(e.getCause());<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    } catch (IOException e) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      throw new ServiceException(e);<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>    }<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>  }<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span><a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>  /**<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>   * Checks if the specified snapshot is done.<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span>   * @return true if the snapshot is in file system ready to use,<a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>   *   false if the snapshot is in the process of completing<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>   * @throws ServiceException wrapping UnknownSnapshotException if invalid snapshot, or<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span>   *  a wrapped HBaseSnapshotException with progress failure reason.<a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>   */<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>  @Override<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>  public IsSnapshotDoneResponse isSnapshotDone(RpcController controller,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>      IsSnapshotDoneRequest request) throws ServiceException {<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>    LOG.debug("Checking to see if snapshot from request:" +<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>      ClientSnapshotDescriptionUtils.toString(request.getSnapshot()) + " is done");<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>    try {<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>      master.checkInitialized();<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>      IsSnapshotDoneResponse.Builder builder = IsSnapshotDoneResponse.newBuilder();<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>      boolean done = master.snapshotManager.isSnapshotDone(request.getSnapshot());<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>      builder.setDone(done);<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>      return builder.build();<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    } catch (ForeignException e) {<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>      throw new ServiceException(e.getCause());<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    } catch (IOException e) {<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      throw new ServiceException(e);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    }<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>  }<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span><a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>  @Override<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>  public GetProcedureResultResponse getProcedureResult(RpcController controller,<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span>      GetProcedureResultRequest request) throws ServiceException {<a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>    LOG.debug("Checking to see if procedure is done pid=" + request.getProcId());<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>    try {<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>      master.checkInitialized();<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      GetProcedureResultResponse.Builder builder = GetProcedureResultResponse.newBuilder();<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span>      long procId = request.getProcId();<a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      ProcedureExecutor&lt;?&gt; executor = master.getMasterProcedureExecutor();<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      Procedure&lt;?&gt; result = executor.getResultOrProcedure(procId);<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      if (result != null) {<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span>        builder.setSubmittedTime(result.getSubmittedTime());<a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>        builder.setLastUpdate(result.getLastUpdate());<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>        if (executor.isFinished(procId)) {<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>          builder.setState(GetProcedureResultResponse.State.FINISHED);<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>          if (result.isFailed()) {<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>            IOException exception = result.getException().unwrapRemoteIOException();<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>            builder.setException(ForeignExceptionUtil.toProtoForeignException(exception));<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>          }<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>          byte[] resultData = result.getResult();<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>          if (resultData != null) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>            builder.setResult(UnsafeByteOperations.unsafeWrap(resultData));<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>          }<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>          master.getMasterProcedureExecutor().removeResult(request.getProcId());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        } else {<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          builder.setState(GetProcedureResultResponse.State.RUNNING);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        }<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span>      } else {<a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        builder.setState(GetProcedureResultResponse.State.NOT_FOUND);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>      }<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span>      return builder.build();<a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>    } catch (IOException e) {<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      throw new ServiceException(e);<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>    }<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>  }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span><a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  @Override<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span>  public AbortProcedureResponse abortProcedure(<a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>      RpcController rpcController, AbortProcedureRequest request) throws ServiceException {<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>    try {<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>      AbortProcedureResponse.Builder response = AbortProcedureResponse.newBuilder();<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>      boolean abortResult =<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>          master.abortProcedure(request.getProcId(), request.getMayInterruptIfRunning());<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>      response.setIsProcedureAborted(abortResult);<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>      return response.build();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    } catch (IOException e) {<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>      throw new ServiceException(e);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    }<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>  }<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span><a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>  @Override<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span>  public ListNamespaceDescriptorsResponse listNamespaceDescriptors(RpcController c,<a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>      ListNamespaceDescriptorsRequest request) throws ServiceException {<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    try {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      ListNamespaceDescriptorsResponse.Builder response =<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>        ListNamespaceDescriptorsResponse.newBuilder();<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      for(NamespaceDescriptor ns: master.getNamespaces()) {<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span>        response.addNamespaceDescriptor(ProtobufUtil.toProtoNamespaceDescriptor(ns));<a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      }<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      return response.build();<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>    } catch (IOException e) {<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span>      throw new ServiceException(e);<a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>    }<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>  }<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>  @Override<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>  public GetProceduresResponse getProcedures(<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      RpcController rpcController,<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      GetProceduresRequest request) throws ServiceException {<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span>    try {<a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      final GetProceduresResponse.Builder response = GetProceduresResponse.newBuilder();<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>      for (Procedure&lt;?&gt; p: master.getProcedures()) {<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>        response.addProcedure(ProcedureUtil.convertToProtoProcedure(p));<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span>      }<a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      return response.build();<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>    } catch (IOException e) {<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      throw new ServiceException(e);<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>    }<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>  }<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span><a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>  @Override<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>  public GetLocksResponse getLocks(<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      RpcController controller,<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span>      GetLocksRequest request) throws ServiceException {<a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>    try {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>      final GetLocksResponse.Builder builder = GetLocksResponse.newBuilder();<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span><a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>      for (LockedResource lockedResource: master.getLocks()) {<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>        builder.addLock(ProcedureUtil.convertToProtoLockedResource(lockedResource));<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span>      }<a name="line.1243"></a>
-<span class="sourceLineNo">1244</span><a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>      return builder.build();<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    } catch (IOException e) {<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>      throw new ServiceException(e);<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    }<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>  }<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span><a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>  @Override<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>  public ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace(RpcController c,<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      ListTableDescriptorsByNamespaceRequest request) throws ServiceException {<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    try {<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>      ListTableDescriptorsByNamespaceResponse.Builder b =<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>          ListTableDescriptorsByNamespaceResponse.newBuilder();<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>      for (TableDescriptor htd : master<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span>          .listTableDescriptorsByNamespace(request.getNamespaceName())) {<a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>        b.addTableSchema(ProtobufUtil.toTableSchema(htd));<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>      }<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>      return b.build();<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>    } catch (IOException e) {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>      throw new ServiceException(e);<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>    }<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>  }<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span><a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>  @Override<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>  public ListTableNamesByNamespaceResponse listTableNamesByNamespace(RpcController c,<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>      ListTableNamesByNamespaceRequest request) throws ServiceException {<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>    try {<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span>      ListTableNamesByNamespaceResponse.Builder b =<a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>        ListTableNamesByNamespaceResponse.newBuilder();<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>      for (TableName tableName: master.listTableNamesByNamespace(request.getNamespaceName())) {<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>        b.addTableName(ProtobufUtil.toProtoTableName(tableName));<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>      }<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>      return b.build();<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    } catch (IOException e) {<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>      throw new ServiceException(e);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    }<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>  }<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span><a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>  @Override<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>  public ModifyColumnResponse modifyColumn(RpcController controller,<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>      ModifyColumnRequest req) throws ServiceException {<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>    try {<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>      long procId = master.modifyColumn(<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>        ProtobufUtil.toColumnFamilyDescriptor(req.getColumnFamilies()),<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>        req.getNonceGroup(),<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>        req.getNonce());<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span>      if (procId == -1) {<a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>        return ModifyColumnResponse.newBuilder().build();<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>      } else {<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span>        return ModifyColumnResponse.newBuilder().setProcId(procId).build();<a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>      }<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>    } catch (IOException ioe) {<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>      throw new ServiceException(ioe);<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>    }<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  }<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span><a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>  @Override<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>  public ModifyNamespaceResponse modifyNamespace(RpcController controller,<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>      ModifyNamespaceRequest request) throws ServiceException {<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>    try {<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      long procId = master.modifyNamespace(<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>        ProtobufUtil.toNamespaceDescriptor(request.getNamespaceDescriptor()),<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>        request.getNonceGroup(),<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span>        request.getNonce());<a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>      return ModifyNamespaceResponse.newBuilder().setProcId(procId).build();<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>    } catch (IOException e) {<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      throw new ServiceException(e);<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span>  }<a name="line.1314"></a>
-<span class="sourceLineNo">1315</span><a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>  @Override<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>  public ModifyTableResponse modifyTable(RpcController controller,<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>      ModifyTableRequest req) throws ServiceException {<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>    try {<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span>      long procId = master.modifyTable(<a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>        ProtobufUtil.toTableDescriptor(req.getTableSchema()),<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>        req.getNonceGroup(),<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>        req.getNonce());<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>      return ModifyTableResponse.newBuilder().setProcId(procId).build();<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    } catch (IOException ioe) {<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>      throw new ServiceException(ioe);<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    }<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>  }<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span><a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>  @Override<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  public MoveRegionResponse moveRegion(RpcController controller,<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span>      MoveRegionRequest req) throws ServiceException {<a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>    final byte [] encodedRegionName = req.getRegion().getValue().toByteArray();<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>    RegionSpecifierType type = req.getRegion().getType();<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>    final byte [] destServerName = (req.hasDestServerName())?<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>      Bytes.toBytes(ProtobufUtil.toServerName(req.getDestServerName()).getServerName()):null;<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>    MoveRegionResponse mrr = MoveRegionResponse.newBuilder().build();<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span><a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    if (type != RegionSpecifierType.ENCODED_REGION_NAME) {<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>      LOG.warn("moveRegion specifier type: expected: " + RegionSpecifierType.ENCODED_REGION_NAME<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span>        + " actual: " + type);<a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    }<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span><a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    try {<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span>      master.checkInitialized();<a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>      master.move(encodedRegionName, destServerName);<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>    } catch (IOException ioe) {<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      throw new ServiceException(ioe);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span>    return mrr;<a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>  }<a name="line.1352"></a>
-<span class="sourceLineNo">1353

<TRUNCATED>

[06/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
----------------------------------------------------------------------
diff --git a/testapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html b/testapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
index 00c8bf0..1e87652 100644
--- a/testapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
+++ b/testapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":42,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":42,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.56">MiniHBaseCluster</a>
+public class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.57">MiniHBaseCluster</a>
 extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">HBaseCluster</a></pre>
 <div class="block">This class creates a single process HBase cluster.
  each server.  The master uses the 'default' FileSystem.  The RegionServers,
@@ -416,38 +416,45 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 </tr>
 <tr id="i36" class="altColor">
 <td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+</td>
+</tr>
+<tr id="i37" class="rowColor">
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#killRegionServer-org.apache.hadoop.hbase.ServerName-">killRegionServer</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
  this causes the region server to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i37" class="rowColor">
+<tr id="i38" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#killZkNode-org.apache.hadoop.hbase.ServerName-">killZkNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i38" class="altColor">
+<tr id="i39" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#shutdown--">shutdown</a></span>()</code>
 <div class="block">Shut down the mini HBase cluster</div>
 </td>
 </tr>
-<tr id="i39" class="rowColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startDataNode-org.apache.hadoop.hbase.ServerName-">startDataNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i40" class="altColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startMaster--">startMaster</a></span>()</code>
 <div class="block">Starts a master thread running</div>
 </td>
 </tr>
-<tr id="i41" class="rowColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startMaster-java.lang.String-int-">startMaster</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -455,13 +462,20 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
  starts a master locally.</div>
 </td>
 </tr>
-<tr id="i42" class="altColor">
+<tr id="i43" class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+</td>
+</tr>
+<tr id="i44" class="altColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServer--">startRegionServer</a></span>()</code>
 <div class="block">Starts a region server thread running</div>
 </td>
 </tr>
-<tr id="i43" class="rowColor">
+<tr id="i45" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServer-java.lang.String-int-">startRegionServer</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                  int&nbsp;port)</code>
@@ -469,13 +483,13 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
  starts a region server locally.</div>
 </td>
 </tr>
-<tr id="i44" class="altColor">
+<tr id="i46" class="altColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServerAndWait-long-">startRegionServerAndWait</a></span>(long&nbsp;timeout)</code>
 <div class="block">Starts a region server thread and waits until its processed by master.</div>
 </td>
 </tr>
-<tr id="i45" class="rowColor">
+<tr id="i47" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#startZkNode-java.lang.String-int-">startZkNode</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -483,120 +497,140 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i46" class="altColor">
+<tr id="i48" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopDataNode-org.apache.hadoop.hbase.ServerName-">stopDataNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i47" class="rowColor">
+<tr id="i49" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopMaster-int-">stopMaster</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Shut down the specified master cleanly</div>
 </td>
 </tr>
-<tr id="i48" class="altColor">
+<tr id="i50" class="altColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopMaster-int-boolean-">stopMaster</a></span>(int&nbsp;serverNumber,
           boolean&nbsp;shutdownFS)</code>
 <div class="block">Shut down the specified master cleanly</div>
 </td>
 </tr>
-<tr id="i49" class="rowColor">
+<tr id="i51" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopMaster-org.apache.hadoop.hbase.ServerName-">stopMaster</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i50" class="altColor">
+<tr id="i52" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+</td>
+</tr>
+<tr id="i53" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-int-">stopRegionServer</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Shut down the specified region server cleanly</div>
 </td>
 </tr>
-<tr id="i51" class="rowColor">
+<tr id="i54" class="altColor">
 <td class="colFirst"><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-int-boolean-">stopRegionServer</a></span>(int&nbsp;serverNumber,
                 boolean&nbsp;shutdownFS)</code>
 <div class="block">Shut down the specified region server cleanly</div>
 </td>
 </tr>
-<tr id="i52" class="altColor">
+<tr id="i55" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-org.apache.hadoop.hbase.ServerName-">stopRegionServer</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i53" class="rowColor">
+<tr id="i56" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#stopZkNode-org.apache.hadoop.hbase.ServerName-">stopZkNode</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i54" class="altColor">
+<tr id="i57" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForActiveAndReadyMaster-long-">waitForActiveAndReadyMaster</a></span>(long&nbsp;timeout)</code>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
 </td>
 </tr>
-<tr id="i55" class="rowColor">
+<tr id="i58" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForDataNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStart</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                       long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to join the cluster</div>
 </td>
 </tr>
-<tr id="i56" class="altColor">
+<tr id="i59" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForDataNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStop</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                      long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i57" class="rowColor">
+<tr id="i60" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">waitForMasterToStop</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified master to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i58" class="altColor">
+<tr id="i61" class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+                      long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+</td>
+</tr>
+<tr id="i62" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+                     long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to stop</div>
+</td>
+</tr>
+<tr id="i63" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-">waitForRegionServerToStop</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                          long&nbsp;timeout)</code>
 <div class="block">Wait for the specified region server to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i59" class="rowColor">
+<tr id="i64" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStart</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                     long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to join the cluster</div>
 </td>
 </tr>
-<tr id="i60" class="altColor">
+<tr id="i65" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForZkNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStop</a></span>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i61" class="rowColor">
+<tr id="i66" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitOnMaster-int-">waitOnMaster</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Wait for the specified master to stop.</div>
 </td>
 </tr>
-<tr id="i62" class="altColor">
+<tr id="i67" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitOnRegionServer-int-">waitOnRegionServer</a></span>(int&nbsp;serverNumber)</code>
 <div class="block">Wait for the specified region server to stop.</div>
 </td>
 </tr>
-<tr id="i63" class="rowColor">
+<tr id="i68" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#waitUntilShutDown--">waitUntilShutDown</a></span>()</code>
 <div class="block">Wait for HBase Cluster to shut down.</div>
@@ -637,7 +671,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hbaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">LocalHBaseCluster</a> <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.58">hbaseCluster</a></pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">LocalHBaseCluster</a> <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.59">hbaseCluster</a></pre>
 </li>
 </ul>
 </li>
@@ -654,7 +688,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.67">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.68">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numRegionServers)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -675,7 +709,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.79">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.80">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numMasters,
                         int&nbsp;numRegionServers)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -698,7 +732,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.90">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.91">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numMasters,
                         int&nbsp;numRegionServers,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/master/HMaster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.master">HMaster</a>&gt;&nbsp;masterClass,
@@ -723,7 +757,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MiniHBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.105">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.106">MiniHBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         int&nbsp;numMasters,
                         int&nbsp;numRegionServers,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;rsPorts,
@@ -757,7 +791,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getConfiguration</h4>
-<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.119">getConfiguration</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.120">getConfiguration</a>()</pre>
 </li>
 </ul>
 <a name="startRegionServer-java.lang.String-int-">
@@ -766,7 +800,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.270">startRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.271">startRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                               int&nbsp;port)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#startRegionServer-java.lang.String-int-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -788,7 +822,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>killRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.275">killRegionServer</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.276">killRegionServer</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#killRegionServer-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
@@ -807,7 +841,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>isKilledRS</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.286">isKilledRS</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.287">isKilledRS</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#isKilledRS-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Keeping track of killed servers and being able to check if a particular server was killed makes
  it possible to do fault tolerance testing for dead servers in a deterministic way. A concrete
@@ -826,7 +860,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.291">stopRegionServer</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.292">stopRegionServer</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#stopRegionServer-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
@@ -844,7 +878,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForRegionServerToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.296">waitForRegionServerToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.297">waitForRegionServerToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                       long&nbsp;timeout)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -863,7 +897,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>startZkNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.302">startZkNode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.303">startZkNode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                         int&nbsp;port)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#startZkNode-java.lang.String-int-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -885,7 +919,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>killZkNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.307">killZkNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.308">killZkNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#killZkNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
@@ -904,7 +938,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopZkNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.312">stopZkNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.313">stopZkNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#stopZkNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
@@ -923,7 +957,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForZkNodeToStart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.317">waitForZkNodeToStart</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.318">waitForZkNodeToStart</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                  long&nbsp;timeout)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -942,7 +976,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForZkNodeToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.322">waitForZkNodeToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.323">waitForZkNodeToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                 long&nbsp;timeout)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForZkNodeToStop-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -961,7 +995,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>startDataNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.327">startDataNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.328">startDataNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#startDataNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
@@ -980,7 +1014,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>killDataNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.332">killDataNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.333">killDataNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#killDataNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Kills the datanode process if this is a distributed cluster, otherwise,
@@ -999,7 +1033,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopDataNode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.337">stopDataNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.338">stopDataNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#stopDataNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
@@ -1018,7 +1052,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForDataNodeToStart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.342">waitForDataNodeToStart</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.343">waitForDataNodeToStart</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                    long&nbsp;timeout)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForDataNodeToStart-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -1037,7 +1071,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForDataNodeToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.347">waitForDataNodeToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.348">waitForDataNodeToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                   long&nbsp;timeout)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForDataNodeToStop-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -1050,13 +1084,107 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 </dl>
 </li>
 </ul>
+<a name="startNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>startNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.353">startNameNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#startNameNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#startNameNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">startNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="killNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>killNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.358">killNameNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#killNameNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#killNameNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">killNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="stopNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>stopNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.363">stopNameNode</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#stopNameNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#stopNameNode-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">stopNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStart</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.368">waitForNameNodeToStart</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+                                   long&nbsp;timeout)
+                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">waitForNameNodeToStart</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStop</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.373">waitForNameNodeToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+                                  long&nbsp;timeout)
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
+<div class="block">Wait for the specified namenode to stop</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">waitForNameNodeToStop</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
 <a name="startMaster-java.lang.String-int-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>startMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.352">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.378">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                         int&nbsp;port)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#startMaster-java.lang.String-int-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -1078,7 +1206,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>killMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.357">killMaster</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.383">killMaster</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#killMaster-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Kills the master process if this is a distributed cluster, otherwise,
@@ -1097,7 +1225,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.362">stopMaster</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.388">stopMaster</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#stopMaster-org.apache.hadoop.hbase.ServerName-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
@@ -1115,7 +1243,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForMasterToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.367">waitForMasterToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.393">waitForMasterToStop</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/ServerName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                 long&nbsp;timeout)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
@@ -1134,7 +1262,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServer</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.378">startRegionServer</a>()
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.404">startRegionServer</a>()
                                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a region server thread running</div>
 <dl>
@@ -1151,7 +1279,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServerAndWait</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.402">startRegionServerAndWait</a>(long&nbsp;timeout)
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.428">startRegionServerAndWait</a>(long&nbsp;timeout)
                                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a region server thread and waits until its processed by master. Throws an exception
  when it can't start a region server or when the region server is not processed by master
@@ -1170,7 +1298,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>abortRegionServer</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.427">abortRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.453">abortRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Cause a region server to exit doing basic clean up only on its way out.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1184,7 +1312,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.440">stopRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.466">stopRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Shut down the specified region server cleanly</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1200,7 +1328,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.454">stopRegionServer</a>(int&nbsp;serverNumber,
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.RegionServerThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.480">stopRegionServer</a>(int&nbsp;serverNumber,
                                                           boolean&nbsp;shutdownFS)</pre>
 <div class="block">Shut down the specified region server cleanly</div>
 <dl>
@@ -1221,7 +1349,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitOnRegionServer</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.469">waitOnRegionServer</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.495">waitOnRegionServer</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Wait for the specified region server to stop. Removes this thread from list
  of running threads.</div>
 <dl>
@@ -1238,7 +1366,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>startMaster</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.479">startMaster</a>()
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.505">startMaster</a>()
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a master thread running</div>
 <dl>
@@ -1255,7 +1383,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterAdminService</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/shaded/protobuf/generated.MasterProtos.MasterService.BlockingInterface.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.shaded.protobuf">org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.499">getMasterAdminService</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/shaded/protobuf/generated.MasterProtos.MasterService.BlockingInterface.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.shaded.protobuf">org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.525">getMasterAdminService</a>()</pre>
 <div class="block">Returns the current active master, if available.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -1271,7 +1399,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaster</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/master/HMaster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.507">getMaster</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/master/HMaster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.533">getMaster</a>()</pre>
 <div class="block">Returns the current active master, if available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1285,7 +1413,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterThread</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.515">getMasterThread</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.541">getMasterThread</a>()</pre>
 <div class="block">Returns the current active master thread, if available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1299,7 +1427,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaster</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/master/HMaster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.528">getMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/master/HMaster.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.554">getMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Returns the master at the specified index, if available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1313,7 +1441,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>abortMaster</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.536">abortMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.562">abortMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Cause a master to exit without shutting down entire cluster.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1327,7 +1455,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.549">stopMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.575">stopMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Shut down the specified master cleanly</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1343,7 +1471,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.563">stopMaster</a>(int&nbsp;serverNumber,
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.589">stopMaster</a>(int&nbsp;serverNumber,
                                               boolean&nbsp;shutdownFS)</pre>
 <div class="block">Shut down the specified master cleanly</div>
 <dl>
@@ -1364,7 +1492,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitOnMaster</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.578">waitOnMaster</a>(int&nbsp;serverNumber)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.604">waitOnMaster</a>(int&nbsp;serverNumber)</pre>
 <div class="block">Wait for the specified master to stop. Removes this thread from list
  of running threads.</div>
 <dl>
@@ -1381,7 +1509,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForActiveAndReadyMaster</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.591">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.617">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
@@ -1405,7 +1533,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterThreads</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.610">getMasterThreads</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.636">getMasterThreads</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>List of master threads.</dd>
@@ -1418,7 +1546,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getLiveMasterThreads</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.617">getLiveMasterThreads</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../apidocs/org/apache/hadoop/hbase/util/JVMClusterUtil.MasterThread.html?is-external=true" title="class or interface in org.apache.hadoop.hbase.util">JVMClusterUtil.MasterThread</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.643">getLiveMasterThreads</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>List of live master threads (skips the aborted and the killed)</dd>
@@ -1431,7 +1559,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>join</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.624">join</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.650">join</a>()</pre>
 <div class="block">Wait for Mini HBase Cluster to shut down.</div>
 </li>
 </ul>
@@ -1441,7 +1569,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.632">shutdown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.658">shutdown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Shut down the mini HBase cluster</div>
 <dl>
@@ -1458,7 +1586,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.639">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.665">close</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#close--" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Closes all the resources held open for this cluster. Note that this call does not shutdown
@@ -1484,7 +1612,7 @@ extends <a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.htm
 <li class="blockList">
 <h4>getClusterStatus</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterStatus.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ClusterStatus</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.647">getClusterStatus</a>()
+public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterStatus.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ClusterStatus</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.673">getClusterStatus</a>()
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0
              Use <a href="../../../../org/apache/hadoop/hbase/MiniHBaseCluster.html#getClusterMetrics--"><code>getClusterMetrics()</code></a> instead.</span></div>
@@ -1500,7 +1628,7 @@ public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterStatu
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterMetrics</h4>
-<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterMetrics.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ClusterMetrics</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.653">getClusterMetrics</a>()
+<pre>public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterMetrics.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">ClusterMetrics</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.679">getClusterMetrics</a>()
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../apidocs/org/apache/hadoop/hbase/HBaseCluster.html?is-external=true#getClusterMetrics--" title="class or interface in org.apache.hadoop.hbase">org.apache.hadoop.hbase.HBaseCluster</a></code></span></div>
 <div class="block">Returns a ClusterMetrics for this HBase cluster.</div>
@@ -1520,7 +1648,7 @@ public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterStatu
 <ul class="blockList">
 <li class="blockList">
 <h4>flushcache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.672">flushcache</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.698">flushcache</a>()
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Call flushCache on all regions on all participating regionservers.</div>
 <dl>
@@ -1535,7 +1663,7 @@ public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterStatu
 <ul class="blockList">
 <li class="blockList">
 <h4>flushcache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.683">flushcache</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/TableName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.709">flushcache</a>(<a href="../../../../../apidocs/org/apache/hadoop/hbase/TableName.html?is-external=true" title="class or interface in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Call flushCache on all regions of the specified table.</div>
 <dl>
@@ -1550,7 +1678,7 @@ public&nbsp;<a href="../../../../../apidocs/org/apache/hadoop/hbase/ClusterStatu


<TRUNCATED>

[08/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.html
index d2d8da1..5bbbf0c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.html
@@ -90,391 +90,392 @@
 <span class="sourceLineNo">082</span>  static final String DEFAULT_WAL_PROVIDER = Providers.defaultProvider.name();<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
 <span class="sourceLineNo">084</span>  public static final String META_WAL_PROVIDER = "hbase.wal.meta_provider";<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  static final String DEFAULT_META_WAL_PROVIDER = Providers.defaultProvider.name();<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  final String factoryId;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  private final WALProvider provider;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // The meta updates are written to a different wal. If this<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  // regionserver holds meta regions, then this ref will be non-null.<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  // lazily intialized; most RegionServers don't deal with META<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  private final AtomicReference&lt;WALProvider&gt; metaProvider = new AtomicReference&lt;&gt;();<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * Configuration-specified WAL Reader used when a custom reader is requested<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private final Class&lt;? extends AbstractFSWALProvider.Reader&gt; logReaderClass;<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  /**<a name="line.99"></a>
-<span class="sourceLineNo">100</span>   * How long to attempt opening in-recovery wals<a name="line.100"></a>
-<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private final int timeoutMillis;<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private final Configuration conf;<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  // Used for the singleton WALFactory, see below.<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private WALFactory(Configuration conf) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    // this code is duplicated here so we can keep our members final.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      AbstractFSWALProvider.Reader.class);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    this.conf = conf;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    // end required early initialization<a name="line.116"></a>
-<span class="sourceLineNo">117</span><a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // this instance can't create wals, just reader/writers.<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    provider = null;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    factoryId = SINGLETON_ID;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>  @VisibleForTesting<a name="line.123"></a>
-<span class="sourceLineNo">124</span>  public Class&lt;? extends WALProvider&gt; getProviderClass(String key, String defaultValue) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    try {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      Providers provider = Providers.valueOf(conf.get(key, defaultValue));<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      if (provider != Providers.defaultProvider) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        // User gives a wal provider explicitly, just use that one<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        return provider.clazz;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      }<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      // AsyncFSWAL has better performance in most cases, and also uses less resources, we will try<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      // to use it if possible. But it deeply hacks into the internal of DFSClient so will be easily<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      // broken when upgrading hadoop. If it is broken, then we fall back to use FSHLog.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      if (AsyncFSWALProvider.load()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>        return AsyncFSWALProvider.class;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      } else {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        return FSHLogProvider.class;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    } catch (IllegalArgumentException exception) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      // Fall back to them specifying a class name<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      // Note that the passed default class shouldn't actually be used, since the above only fails<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      // when there is a config value present.<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      return conf.getClass(key, Providers.defaultProvider.clazz, WALProvider.class);<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    }<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  static WALProvider createProvider(Class&lt;? extends WALProvider&gt; clazz) throws IOException {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    LOG.info("Instantiating WALProvider of type {}", clazz);<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    try {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      return clazz.getDeclaredConstructor().newInstance();<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    } catch (Exception e) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      LOG.error("couldn't set up WALProvider, the configured class is " + clazz);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      LOG.debug("Exception details for failure to load WALProvider.", e);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      throw new IOException("couldn't set up WALProvider", e);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    }<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  }<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>  /**<a name="line.158"></a>
-<span class="sourceLineNo">159</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.159"></a>
-<span class="sourceLineNo">160</span>   *          instances.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.161"></a>
-<span class="sourceLineNo">162</span>   *          to make a directory<a name="line.162"></a>
-<span class="sourceLineNo">163</span>   */<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  public WALFactory(Configuration conf, String factoryId) throws IOException {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    // default enableSyncReplicationWALProvider is true, only disable SyncReplicationWALProvider<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    // for HMaster or HRegionServer which take system table only. See HBASE-19999<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    this(conf, factoryId, true);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
-<span class="sourceLineNo">171</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.171"></a>
-<span class="sourceLineNo">172</span>   *          instances.<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   *          to make a directory<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param enableSyncReplicationWALProvider whether wrap the wal provider to a<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   *          {@link SyncReplicationWALProvider}<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   */<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public WALFactory(Configuration conf, String factoryId, boolean enableSyncReplicationWALProvider)<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      throws IOException {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      AbstractFSWALProvider.Reader.class);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    this.conf = conf;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    this.factoryId = factoryId;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    // end required early initialization<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    if (conf.getBoolean("hbase.regionserver.hlog.enabled", true)) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      WALProvider provider = createProvider(getProviderClass(WAL_PROVIDER, DEFAULT_WAL_PROVIDER));<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      if (enableSyncReplicationWALProvider) {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        provider = new SyncReplicationWALProvider(provider);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      }<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      provider.init(this, conf, null);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      this.provider = provider;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    } else {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      // special handling of existing configuration behavior.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      LOG.warn("Running with WAL disabled.");<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      provider = new DisabledWALProvider();<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      provider.init(this, conf, factoryId);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  /**<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   * Shutdown all WALs and clean up any underlying storage.<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Use only when you will not need to replay and edits that have gone to any wals from this<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * factory.<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  public void close() throws IOException {<a name="line.210"></a>
-<span class="sourceLineNo">211</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    if (null != metaProvider) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      metaProvider.close();<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    // close is called on a WALFactory with null provider in the case of contention handling<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // within the getInstance method.<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    if (null != provider) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      provider.close();<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  }<a name="line.220"></a>
-<span class="sourceLineNo">221</span><a name="line.221"></a>
-<span class="sourceLineNo">222</span>  /**<a name="line.222"></a>
-<span class="sourceLineNo">223</span>   * Tell the underlying WAL providers to shut down, but do not clean up underlying storage.<a name="line.223"></a>
-<span class="sourceLineNo">224</span>   * If you are not ending cleanly and will need to replay edits from this factory's wals,<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * use this method if you can as it will try to leave things as tidy as possible.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  public void shutdown() throws IOException {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    IOException exception = null;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    if (null != metaProvider) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      try {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        metaProvider.shutdown();<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      } catch(IOException ioe) {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>        exception = ioe;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    provider.shutdown();<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    if (null != exception) {<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      throw exception;<a name="line.239"></a>
-<span class="sourceLineNo">240</span>    }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public List&lt;WAL&gt; getWALs() {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>    return provider.getWALs();<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  }<a name="line.245"></a>
-<span class="sourceLineNo">246</span><a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private WALProvider getMetaProvider() throws IOException {<a name="line.247"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  final String factoryId;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  private final WALProvider provider;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  // The meta updates are written to a different wal. If this<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  // regionserver holds meta regions, then this ref will be non-null.<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  // lazily intialized; most RegionServers don't deal with META<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  private final AtomicReference&lt;WALProvider&gt; metaProvider = new AtomicReference&lt;&gt;();<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>  /**<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * Configuration-specified WAL Reader used when a custom reader is requested<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private final Class&lt;? extends AbstractFSWALProvider.Reader&gt; logReaderClass;<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /**<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * How long to attempt opening in-recovery wals<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   */<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private final int timeoutMillis;<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private final Configuration conf;<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  // Used for the singleton WALFactory, see below.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private WALFactory(Configuration conf) {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>    // this code is duplicated here so we can keep our members final.<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      AbstractFSWALProvider.Reader.class);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    this.conf = conf;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    // end required early initialization<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>    // this instance can't create wals, just reader/writers.<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    provider = null;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    factoryId = SINGLETON_ID;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  @VisibleForTesting<a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public Class&lt;? extends WALProvider&gt; getProviderClass(String key, String defaultValue) {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    try {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      Providers provider = Providers.valueOf(conf.get(key, defaultValue));<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      if (provider != Providers.defaultProvider) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        // User gives a wal provider explicitly, just use that one<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        return provider.clazz;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      // AsyncFSWAL has better performance in most cases, and also uses less resources, we will try<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      // to use it if possible. But it deeply hacks into the internal of DFSClient so will be easily<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      // broken when upgrading hadoop. If it is broken, then we fall back to use FSHLog.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      if (AsyncFSWALProvider.load()) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        return AsyncFSWALProvider.class;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      } else {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        return FSHLogProvider.class;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>      }<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    } catch (IllegalArgumentException exception) {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      // Fall back to them specifying a class name<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      // Note that the passed default class shouldn't actually be used, since the above only fails<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // when there is a config value present.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      return conf.getClass(key, Providers.defaultProvider.clazz, WALProvider.class);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  }<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  static WALProvider createProvider(Class&lt;? extends WALProvider&gt; clazz) throws IOException {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    LOG.info("Instantiating WALProvider of type {}", clazz);<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    try {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      return clazz.getDeclaredConstructor().newInstance();<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    } catch (Exception e) {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      LOG.error("couldn't set up WALProvider, the configured class is " + clazz);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      LOG.debug("Exception details for failure to load WALProvider.", e);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      throw new IOException("couldn't set up WALProvider", e);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  /**<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.158"></a>
+<span class="sourceLineNo">159</span>   *          instances.<a name="line.159"></a>
+<span class="sourceLineNo">160</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.160"></a>
+<span class="sourceLineNo">161</span>   *          to make a directory<a name="line.161"></a>
+<span class="sourceLineNo">162</span>   */<a name="line.162"></a>
+<span class="sourceLineNo">163</span>  public WALFactory(Configuration conf, String factoryId) throws IOException {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    // default enableSyncReplicationWALProvider is true, only disable SyncReplicationWALProvider<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    // for HMaster or HRegionServer which take system table only. See HBASE-19999<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    this(conf, factoryId, true);<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /**<a name="line.169"></a>
+<span class="sourceLineNo">170</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   *          instances.<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.172"></a>
+<span class="sourceLineNo">173</span>   *          to make a directory<a name="line.173"></a>
+<span class="sourceLineNo">174</span>   * @param enableSyncReplicationWALProvider whether wrap the wal provider to a<a name="line.174"></a>
+<span class="sourceLineNo">175</span>   *          {@link SyncReplicationWALProvider}<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   */<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  public WALFactory(Configuration conf, String factoryId, boolean enableSyncReplicationWALProvider)<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      throws IOException {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      AbstractFSWALProvider.Reader.class);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    this.conf = conf;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    this.factoryId = factoryId;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    // end required early initialization<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    if (conf.getBoolean("hbase.regionserver.hlog.enabled", true)) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      WALProvider provider = createProvider(getProviderClass(WAL_PROVIDER, DEFAULT_WAL_PROVIDER));<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      if (enableSyncReplicationWALProvider) {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        provider = new SyncReplicationWALProvider(provider);<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      provider.init(this, conf, null);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      this.provider = provider;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    } else {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      // special handling of existing configuration behavior.<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      LOG.warn("Running with WAL disabled.");<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      provider = new DisabledWALProvider();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      provider.init(this, conf, factoryId);<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>  /**<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * Shutdown all WALs and clean up any underlying storage.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * Use only when you will not need to replay and edits that have gone to any wals from this<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * factory.<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public void close() throws IOException {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (null != metaProvider) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      metaProvider.close();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    }<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    // close is called on a WALFactory with null provider in the case of contention handling<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    // within the getInstance method.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    if (null != provider) {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      provider.close();<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    }<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  }<a name="line.219"></a>
+<span class="sourceLineNo">220</span><a name="line.220"></a>
+<span class="sourceLineNo">221</span>  /**<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * Tell the underlying WAL providers to shut down, but do not clean up underlying storage.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   * If you are not ending cleanly and will need to replay edits from this factory's wals,<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * use this method if you can as it will try to leave things as tidy as possible.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   */<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  public void shutdown() throws IOException {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    IOException exception = null;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    if (null != metaProvider) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      try {<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        metaProvider.shutdown();<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      } catch(IOException ioe) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>        exception = ioe;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      }<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    provider.shutdown();<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    if (null != exception) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      throw exception;<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    }<a name="line.239"></a>
+<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public List&lt;WAL&gt; getWALs() {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    return provider.getWALs();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  @VisibleForTesting<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  WALProvider getMetaProvider() throws IOException {<a name="line.247"></a>
 <span class="sourceLineNo">248</span>    for (;;) {<a name="line.248"></a>
 <span class="sourceLineNo">249</span>      WALProvider provider = this.metaProvider.get();<a name="line.249"></a>
 <span class="sourceLineNo">250</span>      if (provider != null) {<a name="line.250"></a>
 <span class="sourceLineNo">251</span>        return provider;<a name="line.251"></a>
 <span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      provider = createProvider(getProviderClass(META_WAL_PROVIDER, DEFAULT_META_WAL_PROVIDER));<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      provider.init(this, conf, AbstractFSWALProvider.META_WAL_PROVIDER_ID);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      if (metaProvider.compareAndSet(null, provider)) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        return provider;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      } else {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        // someone is ahead of us, close and try again.<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        provider.close();<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  }<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>  /**<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * @param region the region which we want to get a WAL for it. Could be null.<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   */<a name="line.267"></a>
-<span class="sourceLineNo">268</span>  public WAL getWAL(RegionInfo region) throws IOException {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    // use different WAL for hbase:meta<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    if (region != null &amp;&amp; region.isMetaRegion() &amp;&amp;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      region.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return getMetaProvider().getWAL(region);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    } else {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return provider.getWAL(region);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  }<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>  public Reader createReader(final FileSystem fs, final Path path) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    return createReader(fs, path, (CancelableProgressable)null);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Create a reader for the WAL. If you are reading from a file that's being written to and need<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * to reopen it multiple times, use {@link WAL.Reader#reset()} instead of this method<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * then just seek back to the last known good position.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * @return A WAL reader.  Close when done with it.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   * @throws IOException<a name="line.287"></a>
-<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  public Reader createReader(final FileSystem fs, final Path path,<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      CancelableProgressable reporter) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    return createReader(fs, path, reporter, true);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public Reader createReader(final FileSystem fs, final Path path, CancelableProgressable reporter,<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      boolean allowCustom) throws IOException {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    Class&lt;? extends AbstractFSWALProvider.Reader&gt; lrClass =<a name="line.296"></a>
-<span class="sourceLineNo">297</span>        allowCustom ? logReaderClass : ProtobufLogReader.class;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      // A wal file could be under recovery, so it may take several<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      // tries to get it open. Instead of claiming it is corrupted, retry<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      // to open it up to 5 minutes by default.<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      long startWaiting = EnvironmentEdgeManager.currentTime();<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      long openTimeout = timeoutMillis + startWaiting;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      int nbAttempt = 0;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      AbstractFSWALProvider.Reader reader = null;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      while (true) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          reader = lrClass.getDeclaredConstructor().newInstance();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>          reader.init(fs, path, conf, null);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          return reader;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        } catch (IOException e) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>          if (reader != null) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>            try {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>              reader.close();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            } catch (IOException exception) {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>              LOG.warn("Could not close FSDataInputStream" + exception.getMessage());<a name="line.316"></a>
-<span class="sourceLineNo">317</span>              LOG.debug("exception details", exception);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>            }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>          }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>          String msg = e.getMessage();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>          if (msg != null<a name="line.322"></a>
-<span class="sourceLineNo">323</span>              &amp;&amp; (msg.contains("Cannot obtain block length")<a name="line.323"></a>
-<span class="sourceLineNo">324</span>                  || msg.contains("Could not obtain the last block") || msg<a name="line.324"></a>
-<span class="sourceLineNo">325</span>                    .matches("Blocklist for [^ ]* has changed.*"))) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (++nbAttempt == 1) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              LOG.warn("Lease should have recovered. This is not expected. Will retry", e);<a name="line.327"></a>
-<span class="sourceLineNo">328</span>            }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new InterruptedIOException("Operation is cancelled");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>            }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            if (nbAttempt &gt; 2 &amp;&amp; openTimeout &lt; EnvironmentEdgeManager.currentTime()) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>              LOG.error("Can't open after " + nbAttempt + " attempts and "<a name="line.333"></a>
-<span class="sourceLineNo">334</span>                  + (EnvironmentEdgeManager.currentTime() - startWaiting) + "ms " + " for " + path);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>            } else {<a name="line.335"></a>
-<span class="sourceLineNo">336</span>              try {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>                Thread.sleep(nbAttempt &lt; 3 ? 500 : 1000);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>                continue; // retry<a name="line.338"></a>
-<span class="sourceLineNo">339</span>              } catch (InterruptedException ie) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>                InterruptedIOException iioe = new InterruptedIOException();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>                iioe.initCause(ie);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>                throw iioe;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>              }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>            }<a name="line.344"></a>
-<span class="sourceLineNo">345</span>            throw new LeaseNotRecoveredException(e);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          } else {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>            throw e;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          }<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        }<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      }<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    } catch (IOException ie) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      throw ie;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (Exception e) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IOException("Cannot get log reader", e);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span>  }<a name="line.356"></a>
-<span class="sourceLineNo">357</span><a name="line.357"></a>
-<span class="sourceLineNo">358</span>  /**<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * Create a writer for the WAL.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * Uses defaults.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * &lt;p&gt;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Should be package-private. public only for tests and<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return A WAL writer. Close when done with it.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public Writer createWALWriter(final FileSystem fs, final Path path) throws IOException {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    return FSHLogProvider.createWriter(conf, fs, path, false);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  }<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  /**<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * Should be package-private, visible for recovery testing.<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * Uses defaults.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @return an overwritable writer for recovered edits. caller should close.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  @VisibleForTesting<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public Writer createRecoveredEditsWriter(final FileSystem fs, final Path path)<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      throws IOException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    return FSHLogProvider.createWriter(conf, fs, path, true);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
-<span class="sourceLineNo">380</span><a name="line.380"></a>
-<span class="sourceLineNo">381</span>  // These static methods are currently used where it's impractical to<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  // untangle the reliance on state in the filesystem. They rely on singleton<a name="line.382"></a>
-<span class="sourceLineNo">383</span>  // WALFactory that just provides Reader / Writers.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  // For now, first Configuration object wins. Practically this just impacts the reader/writer class<a name="line.384"></a>
-<span class="sourceLineNo">385</span>  private static final AtomicReference&lt;WALFactory&gt; singleton = new AtomicReference&lt;&gt;();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>  private static final String SINGLETON_ID = WALFactory.class.getName();<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  <a name="line.387"></a>
-<span class="sourceLineNo">388</span>  // Public only for FSHLog<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static WALFactory getInstance(Configuration configuration) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    WALFactory factory = singleton.get();<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    if (null == factory) {<a name="line.391"></a>
-<span class="sourceLineNo">392</span>      WALFactory temp = new WALFactory(configuration);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      if (singleton.compareAndSet(null, temp)) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        factory = temp;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>      } else {<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        // someone else beat us to initializing<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        try {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>          temp.close();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        } catch (IOException exception) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>          LOG.debug("failed to close temporary singleton. ignoring.", exception);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>        }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>        factory = singleton.get();<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    }<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    return factory;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return a WAL Reader, caller must close.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  public static Reader createReader(final FileSystem fs, final Path path,<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      final Configuration configuration) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return getInstance(configuration).createReader(fs, path);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @return a WAL Reader, caller must close.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   */<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  static Reader createReader(final FileSystem fs, final Path path,<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      final Configuration configuration, final CancelableProgressable reporter) throws IOException {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    return getInstance(configuration).createReader(fs, path, reporter);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>  }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>  /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>   * Create a reader for the given path, ignore custom reader classes from conf.<a name="line.429"></a>
-<span class="sourceLineNo">430</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * only public pending move of {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * @return a WAL Reader, caller must close.<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   */<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  public static Reader createReaderIgnoreCustomClass(final FileSystem fs, final Path path,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      final Configuration configuration) throws IOException {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return getInstance(configuration).createReader(fs, path, null, false);<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  /**<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * Uses defaults.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @return a Writer that will overwrite files. Caller must close.<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  static Writer createRecoveredEditsWriter(final FileSystem fs, final Path path,<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      final Configuration configuration)<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      throws IOException {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    return FSHLogProvider.createWriter(configuration, fs, path, true);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  }<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>  /**<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * Uses defaults.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   * @return a writer that won't overwrite files. Caller must close.<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   */<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  @VisibleForTesting<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public static Writer createWALWriter(final FileSystem fs, final Path path,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      final Configuration configuration)<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      throws IOException {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    return FSHLogProvider.createWriter(configuration, fs, path, false);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>  }<a name="line.460"></a>
-<span class="sourceLineNo">461</span><a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public final WALProvider getWALProvider() {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    return this.provider;<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  public final WALProvider getMetaWALProvider() {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    return this.metaProvider.get();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>}<a name="line.469"></a>
+<span class="sourceLineNo">253</span>      provider = createProvider(getProviderClass(META_WAL_PROVIDER,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          conf.get(WAL_PROVIDER, DEFAULT_WAL_PROVIDER)));<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      provider.init(this, conf, AbstractFSWALProvider.META_WAL_PROVIDER_ID);<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      if (metaProvider.compareAndSet(null, provider)) {<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        return provider;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      } else {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        // someone is ahead of us, close and try again.<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        provider.close();<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      }<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
+<span class="sourceLineNo">264</span>  }<a name="line.264"></a>
+<span class="sourceLineNo">265</span><a name="line.265"></a>
+<span class="sourceLineNo">266</span>  /**<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param region the region which we want to get a WAL for it. Could be null.<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   */<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  public WAL getWAL(RegionInfo region) throws IOException {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    // use different WAL for hbase:meta<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    if (region != null &amp;&amp; region.isMetaRegion() &amp;&amp;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      region.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return getMetaProvider().getWAL(region);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    } else {<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return provider.getWAL(region);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  }<a name="line.277"></a>
+<span class="sourceLineNo">278</span><a name="line.278"></a>
+<span class="sourceLineNo">279</span>  public Reader createReader(final FileSystem fs, final Path path) throws IOException {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    return createReader(fs, path, (CancelableProgressable)null);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>  /**<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * Create a reader for the WAL. If you are reading from a file that's being written to and need<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * to reopen it multiple times, use {@link WAL.Reader#reset()} instead of this method<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * then just seek back to the last known good position.<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * @return A WAL reader.  Close when done with it.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * @throws IOException<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   */<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public Reader createReader(final FileSystem fs, final Path path,<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      CancelableProgressable reporter) throws IOException {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    return createReader(fs, path, reporter, true);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  public Reader createReader(final FileSystem fs, final Path path, CancelableProgressable reporter,<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      boolean allowCustom) throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    Class&lt;? extends AbstractFSWALProvider.Reader&gt; lrClass =<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        allowCustom ? logReaderClass : ProtobufLogReader.class;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    try {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      // A wal file could be under recovery, so it may take several<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      // tries to get it open. Instead of claiming it is corrupted, retry<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      // to open it up to 5 minutes by default.<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      long startWaiting = EnvironmentEdgeManager.currentTime();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      long openTimeout = timeoutMillis + startWaiting;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      int nbAttempt = 0;<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      AbstractFSWALProvider.Reader reader = null;<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      while (true) {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          reader = lrClass.getDeclaredConstructor().newInstance();<a name="line.309"></a>
+<span class="sourceLineNo">310</span>          reader.init(fs, path, conf, null);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          return reader;<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        } catch (IOException e) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>          if (reader != null) {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>            try {<a name="line.314"></a>
+<span class="sourceLineNo">315</span>              reader.close();<a name="line.315"></a>
+<span class="sourceLineNo">316</span>            } catch (IOException exception) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>              LOG.warn("Could not close FSDataInputStream" + exception.getMessage());<a name="line.317"></a>
+<span class="sourceLineNo">318</span>              LOG.debug("exception details", exception);<a name="line.318"></a>
+<span class="sourceLineNo">319</span>            }<a name="line.319"></a>
+<span class="sourceLineNo">320</span>          }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>          String msg = e.getMessage();<a name="line.322"></a>
+<span class="sourceLineNo">323</span>          if (msg != null<a name="line.323"></a>
+<span class="sourceLineNo">324</span>              &amp;&amp; (msg.contains("Cannot obtain block length")<a name="line.324"></a>
+<span class="sourceLineNo">325</span>                  || msg.contains("Could not obtain the last block") || msg<a name="line.325"></a>
+<span class="sourceLineNo">326</span>                    .matches("Blocklist for [^ ]* has changed.*"))) {<a name="line.326"></a>
+<span class="sourceLineNo">327</span>            if (++nbAttempt == 1) {<a name="line.327"></a>
+<span class="sourceLineNo">328</span>              LOG.warn("Lease should have recovered. This is not expected. Will retry", e);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            }<a name="line.329"></a>
+<span class="sourceLineNo">330</span>            if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.330"></a>
+<span class="sourceLineNo">331</span>              throw new InterruptedIOException("Operation is cancelled");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>            if (nbAttempt &gt; 2 &amp;&amp; openTimeout &lt; EnvironmentEdgeManager.currentTime()) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>              LOG.error("Can't open after " + nbAttempt + " attempts and "<a name="line.334"></a>
+<span class="sourceLineNo">335</span>                  + (EnvironmentEdgeManager.currentTime() - startWaiting) + "ms " + " for " + path);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>            } else {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>              try {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>                Thread.sleep(nbAttempt &lt; 3 ? 500 : 1000);<a name="line.338"></a>
+<span class="sourceLineNo">339</span>                continue; // retry<a name="line.339"></a>
+<span class="sourceLineNo">340</span>              } catch (InterruptedException ie) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>                InterruptedIOException iioe = new InterruptedIOException();<a name="line.341"></a>
+<span class="sourceLineNo">342</span>                iioe.initCause(ie);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>                throw iioe;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>              }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>            }<a name="line.345"></a>
+<span class="sourceLineNo">346</span>            throw new LeaseNotRecoveredException(e);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>          } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>            throw e;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>          }<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    } catch (IOException ie) {<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      throw ie;<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    } catch (Exception e) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IOException("Cannot get log reader", e);<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
+<span class="sourceLineNo">358</span><a name="line.358"></a>
+<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   * Create a writer for the WAL.<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * Uses defaults.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * &lt;p&gt;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Should be package-private. public only for tests and<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @return A WAL writer. Close when done with it.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
+<span class="sourceLineNo">367</span>  public Writer createWALWriter(final FileSystem fs, final Path path) throws IOException {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    return FSHLogProvider.createWriter(conf, fs, path, false);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /**<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Should be package-private, visible for recovery testing.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   * Uses defaults.<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   * @return an overwritable writer for recovered edits. caller should close.<a name="line.374"></a>
+<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
+<span class="sourceLineNo">376</span>  @VisibleForTesting<a name="line.376"></a>
+<span class="sourceLineNo">377</span>  public Writer createRecoveredEditsWriter(final FileSystem fs, final Path path)<a name="line.377"></a>
+<span class="sourceLineNo">378</span>      throws IOException {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    return FSHLogProvider.createWriter(conf, fs, path, true);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  // These static methods are currently used where it's impractical to<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // untangle the reliance on state in the filesystem. They rely on singleton<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // WALFactory that just provides Reader / Writers.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  // For now, first Configuration object wins. Practically this just impacts the reader/writer class<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  private static final AtomicReference&lt;WALFactory&gt; singleton = new AtomicReference&lt;&gt;();<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  private static final String SINGLETON_ID = WALFactory.class.getName();<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  <a name="line.388"></a>
+<span class="sourceLineNo">389</span>  // Public only for FSHLog<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  public static WALFactory getInstance(Configuration configuration) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    WALFactory factory = singleton.get();<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    if (null == factory) {<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      WALFactory temp = new WALFactory(configuration);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>      if (singleton.compareAndSet(null, temp)) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>        factory = temp;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>      } else {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>        // someone else beat us to initializing<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        try {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>          temp.close();<a name="line.399"></a>
+<span class="sourceLineNo">400</span>        } catch (IOException exception) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>          LOG.debug("failed to close temporary singleton. ignoring.", exception);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        }<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        factory = singleton.get();<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      }<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    return factory;<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @return a WAL Reader, caller must close.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   */<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  public static Reader createReader(final FileSystem fs, final Path path,<a name="line.414"></a>
+<span class="sourceLineNo">415</span>      final Configuration configuration) throws IOException {<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    return getInstance(configuration).createReader(fs, path);<a name="line.416"></a>
+<span class="sourceLineNo">417</span>  }<a name="line.417"></a>
+<span class="sourceLineNo">418</span><a name="line.418"></a>
+<span class="sourceLineNo">419</span>  /**<a name="line.419"></a>
+<span class="sourceLineNo">420</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   * @return a WAL Reader, caller must close.<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  static Reader createReader(final FileSystem fs, final Path path,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      final Configuration configuration, final CancelableProgressable reporter) throws IOException {<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    return getInstance(configuration).createReader(fs, path, reporter);<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  /**<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   * Create a reader for the given path, ignore custom reader classes from conf.<a name="line.430"></a>
+<span class="sourceLineNo">431</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.431"></a>
+<span class="sourceLineNo">432</span>   * only public pending move of {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.432"></a>
+<span class="sourceLineNo">433</span>   * @return a WAL Reader, caller must close.<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
+<span class="sourceLineNo">435</span>  public static Reader createReaderIgnoreCustomClass(final FileSystem fs, final Path path,<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      final Configuration configuration) throws IOException {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    return getInstance(configuration).createReader(fs, path, null, false);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
+<span class="sourceLineNo">439</span><a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
+<span class="sourceLineNo">441</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.441"></a>
+<span class="sourceLineNo">442</span>   * Uses defaults.<a name="line.442"></a>
+<span class="sourceLineNo">443</span>   * @return a Writer that will overwrite files. Caller must close.<a name="line.443"></a>
+<span class="sourceLineNo">444</span>   */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>  static Writer createRecoveredEditsWriter(final FileSystem fs, final Path path,<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      final Configuration configuration)<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      throws IOException {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    return FSHLogProvider.createWriter(configuration, fs, path, true);<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  }<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   * Uses defaults.<a name="line.453"></a>
+<span class="sourceLineNo">454</span>   * @return a writer that won't overwrite files. Caller must close.<a name="line.454"></a>
+<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
+<span class="sourceLineNo">456</span>  @VisibleForTesting<a name="line.456"></a>
+<span class="sourceLineNo">457</span>  public static Writer createWALWriter(final FileSystem fs, final Path path,<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      final Configuration configuration)<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      throws IOException {<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    return FSHLogProvider.createWriter(configuration, fs, path, false);<a name="line.460"></a>
+<span class="sourceLineNo">461</span>  }<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public final WALProvider getWALProvider() {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    return this.provider;<a name="line.464"></a>
+<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  public final WALProvider getMetaWALProvider() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return this.metaProvider.get();<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span>}<a name="line.470"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/downloads.html
----------------------------------------------------------------------
diff --git a/downloads.html b/downloads.html
index 4b721d2..3cacc07 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase Downloads</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -423,7 +423,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/export_control.html
----------------------------------------------------------------------
diff --git a/export_control.html b/export_control.html
index b77cbb9..04aa869 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Export Control
@@ -331,7 +331,7 @@ for more details.</p>
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/index.html
----------------------------------------------------------------------
diff --git a/index.html b/index.html
index e942efd..8e1b388 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBaseâ„¢ Home</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -411,7 +411,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/integration.html
----------------------------------------------------------------------
diff --git a/integration.html b/integration.html
index f4412be..e6bc91d 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; CI Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -291,7 +291,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/issue-tracking.html
----------------------------------------------------------------------
diff --git a/issue-tracking.html b/issue-tracking.html
index 0138919..5748e0f 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Issue Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -288,7 +288,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/license.html
----------------------------------------------------------------------
diff --git a/license.html b/license.html
index fb4e2fd..bdfe998 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Licenses</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -491,7 +491,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/mail-lists.html
----------------------------------------------------------------------
diff --git a/mail-lists.html b/mail-lists.html
index 6bdba55..428a3f1 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Mailing Lists</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -341,7 +341,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/metrics.html
----------------------------------------------------------------------
diff --git a/metrics.html b/metrics.html
index 8015deb..afa6d18 100644
--- a/metrics.html
+++ b/metrics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) Metrics
@@ -459,7 +459,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/old_news.html
----------------------------------------------------------------------
diff --git a/old_news.html b/old_news.html
index c1b892e..10c2753 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Old Apache HBase (TM) News
@@ -440,7 +440,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/plugin-management.html
----------------------------------------------------------------------
diff --git a/plugin-management.html b/plugin-management.html
index 4662d17..2e52273 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugin Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -440,7 +440,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 


[34/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
index 1e0659a..981ebcd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
@@ -73,1969 +73,1975 @@
 <span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Table;<a name="line.65"></a>
 <span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.66"></a>
 <span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.slf4j.Logger;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.slf4j.LoggerFactory;<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>/**<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * This class provides API to access backup system table&lt;br&gt;<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * Backup system table schema:&lt;br&gt;<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * &lt;p&gt;<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * &lt;ul&gt;<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * &lt;li&gt;1. Backup sessions rowkey= "session:"+backupId; value =serialized BackupInfo&lt;/li&gt;<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * &lt;li&gt;2. Backup start code rowkey = "startcode:"+backupRoot; value = startcode&lt;/li&gt;<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * &lt;li&gt;3. Incremental backup set rowkey="incrbackupset:"+backupRoot; value=[list of tables]&lt;/li&gt;<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * &lt;li&gt;4. Table-RS-timestamp map rowkey="trslm:"+backupRoot+table_name; value = map[RS-&gt; last WAL<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * timestamp]&lt;/li&gt;<a name="line.86"></a>
-<span class="sourceLineNo">087</span> * &lt;li&gt;5. RS - WAL ts map rowkey="rslogts:"+backupRoot +server; value = last WAL timestamp&lt;/li&gt;<a name="line.87"></a>
-<span class="sourceLineNo">088</span> * &lt;li&gt;6. WALs recorded rowkey="wals:"+WAL unique file name; value = backupId and full WAL file<a name="line.88"></a>
-<span class="sourceLineNo">089</span> * name&lt;/li&gt;<a name="line.89"></a>
-<span class="sourceLineNo">090</span> * &lt;/ul&gt;<a name="line.90"></a>
-<span class="sourceLineNo">091</span> * &lt;/p&gt;<a name="line.91"></a>
-<span class="sourceLineNo">092</span> */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>@InterfaceAudience.Private<a name="line.93"></a>
-<span class="sourceLineNo">094</span>public final class BackupSystemTable implements Closeable {<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  private static final Logger LOG = LoggerFactory.getLogger(BackupSystemTable.class);<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  static class WALItem {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    String backupId;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    String walFile;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    String backupRoot;<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>    WALItem(String backupId, String walFile, String backupRoot) {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>      this.backupId = backupId;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      this.walFile = walFile;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      this.backupRoot = backupRoot;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>    public String getBackupId() {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      return backupId;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>    public String getWalFile() {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      return walFile;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>    public String getBackupRoot() {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      return backupRoot;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    @Override<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    public String toString() {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      return Path.SEPARATOR + backupRoot + Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /**<a name="line.127"></a>
-<span class="sourceLineNo">128</span>   * Backup system table (main) name<a name="line.128"></a>
-<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  private TableName tableName;<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * Backup System table name for bulk loaded files. We keep all bulk loaded file references in a<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * separate table because we have to isolate general backup operations: create, merge etc from<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * activity of RegionObserver, which controls process of a bulk loading<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * {@link org.apache.hadoop.hbase.backup.BackupObserver}<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  private TableName bulkLoadTableName;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
-<span class="sourceLineNo">141</span>   * Stores backup sessions (contexts)<a name="line.141"></a>
-<span class="sourceLineNo">142</span>   */<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  final static byte[] SESSIONS_FAMILY = "session".getBytes();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  /**<a name="line.144"></a>
-<span class="sourceLineNo">145</span>   * Stores other meta<a name="line.145"></a>
-<span class="sourceLineNo">146</span>   */<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  final static byte[] META_FAMILY = "meta".getBytes();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  final static byte[] BULK_LOAD_FAMILY = "bulk".getBytes();<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  /**<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   * Connection to HBase cluster, shared among all instances<a name="line.150"></a>
-<span class="sourceLineNo">151</span>   */<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  private final Connection connection;<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  private final static String BACKUP_INFO_PREFIX = "session:";<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  private final static String START_CODE_ROW = "startcode:";<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  private final static byte[] ACTIVE_SESSION_ROW = "activesession:".getBytes();<a name="line.156"></a>
-<span class="sourceLineNo">157</span>  private final static byte[] ACTIVE_SESSION_COL = "c".getBytes();<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>  private final static byte[] ACTIVE_SESSION_YES = "yes".getBytes();<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  private final static byte[] ACTIVE_SESSION_NO = "no".getBytes();<a name="line.160"></a>
-<span class="sourceLineNo">161</span><a name="line.161"></a>
-<span class="sourceLineNo">162</span>  private final static String INCR_BACKUP_SET = "incrbackupset:";<a name="line.162"></a>
-<span class="sourceLineNo">163</span>  private final static String TABLE_RS_LOG_MAP_PREFIX = "trslm:";<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  private final static String RS_LOG_TS_PREFIX = "rslogts:";<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  private final static String BULK_LOAD_PREFIX = "bulk:";<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private final static byte[] BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private final static byte[] DELETE_OP_ROW = "delete_op_row".getBytes();<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final static byte[] MERGE_OP_ROW = "merge_op_row".getBytes();<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>  final static byte[] TBL_COL = Bytes.toBytes("tbl");<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  final static byte[] FAM_COL = Bytes.toBytes("fam");<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  final static byte[] PATH_COL = Bytes.toBytes("path");<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  final static byte[] STATE_COL = Bytes.toBytes("state");<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  // the two states a bulk loaded file can be<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  final static byte[] BL_PREPARE = Bytes.toBytes("R");<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  final static byte[] BL_COMMIT = Bytes.toBytes("D");<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private final static String WALS_PREFIX = "wals:";<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  private final static String SET_KEY_PREFIX = "backupset:";<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>  // separator between BULK_LOAD_PREFIX and ordinals<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  protected final static String BLK_LD_DELIM = ":";<a name="line.183"></a>
-<span class="sourceLineNo">184</span>  private final static byte[] EMPTY_VALUE = new byte[] {};<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  // Safe delimiter in a string<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private final static String NULL = "\u0000";<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  public BackupSystemTable(Connection conn) throws IOException {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    this.connection = conn;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    Configuration conf = this.connection.getConfiguration();<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    tableName = BackupSystemTable.getTableName(conf);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    bulkLoadTableName = BackupSystemTable.getTableNameForBulkLoadedData(conf);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    checkSystemTable();<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private void checkSystemTable() throws IOException {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    try (Admin admin = connection.getAdmin()) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      verifyNamespaceExists(admin);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      Configuration conf = connection.getConfiguration();<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      if (!admin.tableExists(tableName)) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        TableDescriptor backupHTD = BackupSystemTable.getSystemTableDescriptor(conf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>        admin.createTable(backupHTD);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      }<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      if (!admin.tableExists(bulkLoadTableName)) {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        TableDescriptor blHTD = BackupSystemTable.getSystemTableForBulkLoadedDataDescriptor(conf);<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        admin.createTable(blHTD);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      waitForSystemTable(admin, tableName);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      waitForSystemTable(admin, bulkLoadTableName);<a name="line.210"></a>
-<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
-<span class="sourceLineNo">212</span>  }<a name="line.212"></a>
-<span class="sourceLineNo">213</span><a name="line.213"></a>
-<span class="sourceLineNo">214</span>  private void verifyNamespaceExists(Admin admin) throws IOException {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    String namespaceName = tableName.getNamespaceAsString();<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    NamespaceDescriptor ns = NamespaceDescriptor.create(namespaceName).build();<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    NamespaceDescriptor[] list = admin.listNamespaceDescriptors();<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    boolean exists = false;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    for (NamespaceDescriptor nsd : list) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      if (nsd.getName().equals(ns.getName())) {<a name="line.220"></a>
-<span class="sourceLineNo">221</span>        exists = true;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        break;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    if (!exists) {<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      admin.createNamespace(ns);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  }<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private void waitForSystemTable(Admin admin, TableName tableName) throws IOException {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    long TIMEOUT = 60000;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    long startTime = EnvironmentEdgeManager.currentTime();<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    while (!admin.tableExists(tableName) || !admin.isTableAvailable(tableName)) {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      try {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        Thread.sleep(100);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      } catch (InterruptedException e) {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      if (EnvironmentEdgeManager.currentTime() - startTime &gt; TIMEOUT) {<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        throw new IOException(<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          "Failed to create backup system table " + tableName + " after " + TIMEOUT + "ms");<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      }<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    }<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    LOG.debug("Backup table " + tableName + " exists and available");<a name="line.243"></a>
-<span class="sourceLineNo">244</span>  }<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>  @Override<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public void close() {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    // do nothing<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  /**<a name="line.251"></a>
-<span class="sourceLineNo">252</span>   * Updates status (state) of a backup session in backup system table table<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * @param info backup info<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @throws IOException exception<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  public void updateBackupInfo(BackupInfo info) throws IOException {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    if (LOG.isTraceEnabled()) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      LOG.trace("update backup status in backup system table for: " + info.getBackupId()<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        + " set status=" + info.getState());<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    try (Table table = connection.getTable(tableName)) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      Put put = createPutForBackupInfo(info);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      table.put(put);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span>  }<a name="line.265"></a>
-<span class="sourceLineNo">266</span><a name="line.266"></a>
-<span class="sourceLineNo">267</span>  /*<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * @param backupId the backup Id<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * @return Map of rows to path of bulk loaded hfile<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   */<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  Map&lt;byte[], String&gt; readBulkLoadedFiles(String backupId) throws IOException {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    Scan scan = BackupSystemTable.createScanForBulkLoadedFiles(backupId);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try (Table table = connection.getTable(bulkLoadTableName);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      Result res = null;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      Map&lt;byte[], String&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      while ((res = scanner.next()) != null) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        res.advance();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>        byte[] row = CellUtil.cloneRow(res.listCells().get(0));<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        for (Cell cell : res.listCells()) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>          if (CellUtil.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0,<a name="line.281"></a>
-<span class="sourceLineNo">282</span>            BackupSystemTable.PATH_COL.length) == 0) {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>            map.put(row, Bytes.toString(CellUtil.cloneValue(cell)));<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          }<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        }<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      }<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      return map;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  }<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  /*<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * Used during restore<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * @param backupId the backup Id<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @param sTableList List of tables<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * @return array of Map of family to List of Paths<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   */<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  public Map&lt;byte[], List&lt;Path&gt;&gt;[] readBulkLoadedFiles(String backupId, List&lt;TableName&gt; sTableList)<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      throws IOException {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    Scan scan = BackupSystemTable.createScanForBulkLoadedFiles(backupId);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Map&lt;byte[], List&lt;Path&gt;&gt;[] mapForSrc = new Map[sTableList == null ? 1 : sTableList.size()];<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    try (Table table = connection.getTable(bulkLoadTableName);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      Result res = null;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      while ((res = scanner.next()) != null) {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        res.advance();<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        TableName tbl = null;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        byte[] fam = null;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>        String path = null;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        for (Cell cell : res.listCells()) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (CellUtil.compareQualifiers(cell, BackupSystemTable.TBL_COL, 0,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>            BackupSystemTable.TBL_COL.length) == 0) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            tbl = TableName.valueOf(CellUtil.cloneValue(cell));<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>            BackupSystemTable.FAM_COL.length) == 0) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            fam = CellUtil.cloneValue(cell);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>          } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0,<a name="line.316"></a>
-<span class="sourceLineNo">317</span>            BackupSystemTable.PATH_COL.length) == 0) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>            path = Bytes.toString(CellUtil.cloneValue(cell));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>          }<a name="line.319"></a>
-<span class="sourceLineNo">320</span>        }<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        int srcIdx = IncrementalTableBackupClient.getIndex(tbl, sTableList);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>        if (srcIdx == -1) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          // the table is not among the query<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          continue;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        }<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        if (mapForSrc[srcIdx] == null) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>          mapForSrc[srcIdx] = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        List&lt;Path&gt; files;<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        if (!mapForSrc[srcIdx].containsKey(fam)) {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>          files = new ArrayList&lt;Path&gt;();<a name="line.331"></a>
-<span class="sourceLineNo">332</span>          mapForSrc[srcIdx].put(fam, files);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        } else {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          files = mapForSrc[srcIdx].get(fam);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>        }<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        files.add(new Path(path));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (LOG.isDebugEnabled()) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          LOG.debug("found bulk loaded file : " + tbl + " " + Bytes.toString(fam) + " " + path);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>      return mapForSrc;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Deletes backup status from backup system table table<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * @param backupId backup id<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * @throws IOException exception<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  public void deleteBackupInfo(String backupId) throws IOException {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    if (LOG.isTraceEnabled()) {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      LOG.trace("delete backup status in backup system table for " + backupId);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    }<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    try (Table table = connection.getTable(tableName)) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      Delete del = createDeleteForBackupInfo(backupId);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      table.delete(del);<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /*<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * For postBulkLoadHFile() hook.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param tabName table name<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @param region the region receiving hfile<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @param finalPaths family and associated hfiles<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public void writePathsPostBulkLoad(TableName tabName, byte[] region,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      Map&lt;byte[], List&lt;Path&gt;&gt; finalPaths) throws IOException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    if (LOG.isDebugEnabled()) {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      LOG.debug("write bulk load descriptor to backup " + tabName + " with " + finalPaths.size()<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        + " entries");<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      List&lt;Put&gt; puts = BackupSystemTable.createPutForCommittedBulkload(tabName, region, finalPaths);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      table.put(puts);<a name="line.375"></a>
-<span class="sourceLineNo">376</span>      LOG.debug("written " + puts.size() + " rows for bulk load of " + tabName);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    }<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  }<a name="line.378"></a>
-<span class="sourceLineNo">379</span><a name="line.379"></a>
-<span class="sourceLineNo">380</span>  /*<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   * For preCommitStoreFile() hook<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @param tabName table name<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param region the region receiving hfile<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param family column family<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @param pairs list of paths for hfiles<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  public void writeFilesForBulkLoadPreCommit(TableName tabName, byte[] region, final byte[] family,<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      final List&lt;Pair&lt;Path, Path&gt;&gt; pairs) throws IOException {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    if (LOG.isDebugEnabled()) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      LOG.debug(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        "write bulk load descriptor to backup " + tabName + " with " + pairs.size() + " entries");<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      List&lt;Put&gt; puts =<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          BackupSystemTable.createPutForPreparedBulkload(tabName, region, family, pairs);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>      table.put(puts);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>      LOG.debug("written " + puts.size() + " rows for bulk load of " + tabName);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    }<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /*<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Removes rows recording bulk loaded hfiles from backup table<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * @param lst list of table names<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * @param rows the rows to be deleted<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   */<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  public void deleteBulkLoadedRows(List&lt;byte[]&gt; rows) throws IOException {<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      List&lt;Delete&gt; lstDels = new ArrayList&lt;&gt;();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      for (byte[] row : rows) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        Delete del = new Delete(row);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        lstDels.add(del);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        LOG.debug("orig deleting the row: " + Bytes.toString(row));<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      }<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      table.delete(lstDels);<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      LOG.debug("deleted " + rows.size() + " original bulkload rows");<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    }<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  }<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  /*<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * Reads the rows from backup table recording bulk loaded hfiles<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param tableList list of table names<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return The keys of the Map are table, region and column family. Value of the map reflects<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   * whether the hfile was recorded by preCommitStoreFile hook (true)<a name="line.423"></a>
-<span class="sourceLineNo">424</span>   */<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  public Pair&lt;Map&lt;TableName, Map&lt;String, Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt;&gt;&gt;, List&lt;byte[]&gt;&gt;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    readBulkloadRows(List&lt;TableName&gt; tableList) throws IOException {<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    Map&lt;TableName, Map&lt;String, Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt;&gt;&gt; map = new HashMap&lt;&gt;();<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    List&lt;byte[]&gt; rows = new ArrayList&lt;&gt;();<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    for (TableName tTable : tableList) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Scan scan = BackupSystemTable.createScanForOrigBulkLoadedFiles(tTable);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      Map&lt;String, Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt;&gt; tblMap = map.get(tTable);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      try (Table table = connection.getTable(bulkLoadTableName);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>          ResultScanner scanner = table.getScanner(scan)) {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>        Result res = null;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        while ((res = scanner.next()) != null) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>          res.advance();<a name="line.437"></a>
-<span class="sourceLineNo">438</span>          String fam = null;<a name="line.438"></a>
-<span class="sourceLineNo">439</span>          String path = null;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>          boolean raw = false;<a name="line.440"></a>
-<span class="sourceLineNo">441</span>          byte[] row;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>          String region = null;<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          for (Cell cell : res.listCells()) {<a name="line.443"></a>
-<span class="sourceLineNo">444</span>            row = CellUtil.cloneRow(cell);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>            rows.add(row);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            String rowStr = Bytes.toString(row);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>            region = BackupSystemTable.getRegionNameFromOrigBulkLoadRow(rowStr);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            if (CellUtil.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>              BackupSystemTable.FAM_COL.length) == 0) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              fam = Bytes.toString(CellUtil.cloneValue(cell));<a name="line.450"></a>
-<span class="sourceLineNo">451</span>            } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0,<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              BackupSystemTable.PATH_COL.length) == 0) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>              path = Bytes.toString(CellUtil.cloneValue(cell));<a name="line.453"></a>
-<span class="sourceLineNo">454</span>            } else if (CellUtil.compareQualifiers(cell, BackupSystemTable.STATE_COL, 0,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>              BackupSystemTable.STATE_COL.length) == 0) {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>              byte[] state = CellUtil.cloneValue(cell);<a name="line.456"></a>
-<span class="sourceLineNo">457</span>              if (Bytes.equals(BackupSystemTable.BL_PREPARE, state)) {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>                raw = true;<a name="line.458"></a>
-<span class="sourceLineNo">459</span>              } else {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>                raw = false;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>              }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>            }<a name="line.462"></a>
-<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>          if (map.get(tTable) == null) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            map.put(tTable, new HashMap&lt;&gt;());<a name="line.465"></a>
-<span class="sourceLineNo">466</span>            tblMap = map.get(tTable);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          }<a name="line.467"></a>
-<span class="sourceLineNo">468</span>          if (tblMap.get(region) == null) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            tblMap.put(region, new HashMap&lt;&gt;());<a name="line.469"></a>
-<span class="sourceLineNo">470</span>          }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          Map&lt;String, List&lt;Pair&lt;String, Boolean&gt;&gt;&gt; famMap = tblMap.get(region);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          if (famMap.get(fam) == null) {<a name="line.472"></a>
-<span class="sourceLineNo">473</span>            famMap.put(fam, new ArrayList&lt;&gt;());<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          }<a name="line.474"></a>
-<span class="sourceLineNo">475</span>          famMap.get(fam).add(new Pair&lt;&gt;(path, raw));<a name="line.475"></a>
-<span class="sourceLineNo">476</span>          LOG.debug("found orig " + path + " for " + fam + " of table " + region);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        }<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      }<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    return new Pair&lt;&gt;(map, rows);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /*<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * @param sTableList List of tables<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param maps array of Map of family to List of Paths<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param backupId the backup Id<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   */<a name="line.487"></a>
-<span class="sourceLineNo">488</span>  public void writeBulkLoadedFiles(List&lt;TableName&gt; sTableList, Map&lt;byte[], List&lt;Path&gt;&gt;[] maps,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      String backupId) throws IOException {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    try (Table table = connection.getTable(bulkLoadTableName)) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      long ts = EnvironmentEdgeManager.currentTime();<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      int cnt = 0;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      List&lt;Put&gt; puts = new ArrayList&lt;&gt;();<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      for (int idx = 0; idx &lt; maps.length; idx++) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>        Map&lt;byte[], List&lt;Path&gt;&gt; map = maps[idx];<a name="line.495"></a>
-<span class="sourceLineNo">496</span>        TableName tn = sTableList.get(idx);<a name="line.496"></a>
-<span class="sourceLineNo">497</span><a name="line.497"></a>
-<span class="sourceLineNo">498</span>        if (map == null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          continue;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        }<a name="line.500"></a>
-<span class="sourceLineNo">501</span><a name="line.501"></a>
-<span class="sourceLineNo">502</span>        for (Map.Entry&lt;byte[], List&lt;Path&gt;&gt; entry : map.entrySet()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          byte[] fam = entry.getKey();<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          List&lt;Path&gt; paths = entry.getValue();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          for (Path p : paths) {<a name="line.505"></a>
-<span class="sourceLineNo">506</span>            Put put = BackupSystemTable.createPutForBulkLoadedFile(tn, fam, p.toString(), backupId,<a name="line.506"></a>
-<span class="sourceLineNo">507</span>              ts, cnt++);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>            puts.add(put);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          }<a name="line.509"></a>
-<span class="sourceLineNo">510</span>        }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>      }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      if (!puts.isEmpty()) {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        table.put(puts);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>  }<a name="line.516"></a>
-<span class="sourceLineNo">517</span><a name="line.517"></a>
-<span class="sourceLineNo">518</span>  /**<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * Reads backup status object (instance of backup info) from backup system table table<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @param backupId backup id<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @return Current status of backup session or null<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  public BackupInfo readBackupInfo(String backupId) throws IOException {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    if (LOG.isTraceEnabled()) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>      LOG.trace("read backup status from backup system table for: " + backupId);<a name="line.525"></a>
-<span class="sourceLineNo">526</span>    }<a name="line.526"></a>
-<span class="sourceLineNo">527</span><a name="line.527"></a>
-<span class="sourceLineNo">528</span>    try (Table table = connection.getTable(tableName)) {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      Get get = createGetForBackupInfo(backupId);<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      Result res = table.get(get);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      if (res.isEmpty()) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        return null;<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      return resultToBackupInfo(res);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  /**<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * Read the last backup start code (timestamp) of last successful backup. Will return null if<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * there is no start code stored on hbase or the value is of length 0. These two cases indicate<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * there is no successful backup completed so far.<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @param backupRoot directory path to backup destination<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   * @return the timestamp of last successful backup<a name="line.543"></a>
-<span class="sourceLineNo">544</span>   * @throws IOException exception<a name="line.544"></a>
-<span class="sourceLineNo">545</span>   */<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  public String readBackupStartCode(String backupRoot) throws IOException {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    LOG.trace("read backup start code from backup system table");<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>    try (Table table = connection.getTable(tableName)) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      Get get = createGetForStartCode(backupRoot);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      Result res = table.get(get);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      if (res.isEmpty()) {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>        return null;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      Cell cell = res.listCells().get(0);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      byte[] val = CellUtil.cloneValue(cell);<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      if (val.length == 0) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>        return null;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      return new String(val);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Write the start code (timestamp) to backup system table. If passed in null, then write 0 byte.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param startCode start code<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param backupRoot root directory path to backup<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @throws IOException exception<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   */<a name="line.569"></a>
-<span class="sourceLineNo">570</span>  public void writeBackupStartCode(Long startCode, String backupRoot) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    if (LOG.isTraceEnabled()) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      LOG.trace("write backup start code to backup system table " + startCode);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    }<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    try (Table table = connection.getTable(tableName)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      Put put = createPutForStartCode(startCode.toString(), backupRoot);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      table.put(put);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    }<a name="line.577"></a>
-<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   * Exclusive operations are: create, delete, merge<a name="line.581"></a>
-<span class="sourceLineNo">582</span>   * @throws IOException if a table operation fails or an active backup exclusive operation is<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   *           already underway<a name="line.583"></a>
-<span class="sourceLineNo">584</span>   */<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  public void startBackupExclusiveOperation() throws IOException {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>    LOG.debug("Start new backup exclusive operation");<a name="line.586"></a>
-<span class="sourceLineNo">587</span><a name="line.587"></a>
-<span class="sourceLineNo">588</span>    try (Table table = connection.getTable(tableName)) {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      Put put = createPutForStartBackupSession();<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // First try to put if row does not exist<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      if (!table.checkAndMutate(ACTIVE_SESSION_ROW, SESSIONS_FAMILY).qualifier(ACTIVE_SESSION_COL)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          .ifNotExists().thenPut(put)) {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>        // Row exists, try to put if value == ACTIVE_SESSION_NO<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        if (!table.checkAndMutate(ACTIVE_SESSION_ROW, SESSIONS_FAMILY).qualifier(ACTIVE_SESSION_COL)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>            .ifEquals(ACTIVE_SESSION_NO).thenPut(put)) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          throw new ExclusiveOperationException();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>        }<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      }<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span>  }<a name="line.600"></a>
-<span class="sourceLineNo">601</span><a name="line.601"></a>
-<span class="sourceLineNo">602</span>  private Put createPutForStartBackupSession() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    Put put = new Put(ACTIVE_SESSION_ROW);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    put.addColumn(SESSIONS_FAMILY, ACTIVE_SESSION_COL, ACTIVE_SESSION_YES);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    return put;<a name="line.605"></a>
-<span class="sourceLineNo">606</span>  }<a name="line.606"></a>
-<span class="sourceLineNo">607</span><a name="line.607"></a>
-<span class="sourceLineNo">608</span>  public void finishBackupExclusiveOperation() throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    LOG.debug("Finish backup exclusive operation");<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    try (Table table = connection.getTable(tableName)) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Put put = createPutForStopBackupSession();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      if (!table.checkAndMutate(ACTIVE_SESSION_ROW, SESSIONS_FAMILY).qualifier(ACTIVE_SESSION_COL)<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          .ifEquals(ACTIVE_SESSION_YES).thenPut(put)) {<a name="line.614"></a>
-<span class="sourceLineNo">615</span>        throw new IOException("There is no active backup exclusive operation");<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>  }<a name="line.618"></a>
-<span class="sourceLineNo">619</span><a name="line.619"></a>
-<span class="sourceLineNo">620</span>  private Put createPutForStopBackupSession() {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    Put put = new Put(ACTIVE_SESSION_ROW);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    put.addColumn(SESSIONS_FAMILY, ACTIVE_SESSION_COL, ACTIVE_SESSION_NO);<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    return put;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
-<span class="sourceLineNo">627</span>   * Get the Region Servers log information after the last log roll from backup system table.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>   * @param backupRoot root directory path to backup<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * @return RS log info<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @throws IOException exception<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   */<a name="line.631"></a>
-<span class="sourceLineNo">632</span>  public HashMap&lt;String, Long&gt; readRegionServerLastLogRollResult(String backupRoot)<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    LOG.trace("read region server last roll log result to backup system table");<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    Scan scan = createScanForReadRegionServerLastLogRollResult(backupRoot);<a name="line.636"></a>
-<span class="sourceLineNo">637</span><a name="line.637"></a>
-<span class="sourceLineNo">638</span>    try (Table table = connection.getTable(tableName);<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      Result res;<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      HashMap&lt;String, Long&gt; rsTimestampMap = new HashMap&lt;&gt;();<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      while ((res = scanner.next()) != null) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        res.advance();<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Cell cell = res.current();<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        byte[] row = CellUtil.cloneRow(cell);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        String server = getServerNameForReadRegionServerLastLogRollResult(row);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        byte[] data = CellUtil.cloneValue(cell);<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        rsTimestampMap.put(server, Bytes.toLong(data));<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      }<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      return rsTimestampMap;<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
-<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
-<span class="sourceLineNo">653</span><a name="line.653"></a>
-<span class="sourceLineNo">654</span>  /**<a name="line.654"></a>
-<span class="sourceLineNo">655</span>   * Writes Region Server last roll log result (timestamp) to backup system table table<a name="line.655"></a>
-<span class="sourceLineNo">656</span>   * @param server Region Server name<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * @param ts last log timestamp<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * @param backupRoot root directory path to backup<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * @throws IOException exception<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  public void writeRegionServerLastLogRollResult(String server, Long ts, String backupRoot)<a name="line.661"></a>
-<span class="sourceLineNo">662</span>      throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    LOG.trace("write region server last roll log result to backup system table");<a name="line.663"></a>
-<span class="sourceLineNo">664</span><a name="line.664"></a>
-<span class="sourceLineNo">665</span>    try (Table table = connection.getTable(tableName)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      Put put = createPutForRegionServerLastLogRollResult(server, ts, backupRoot);<a name="line.666"></a>
-<span class="sourceLineNo">667</span>      table.put(put);<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
-<span class="sourceLineNo">672</span>   * Get all completed backup information (in desc order by time)<a name="line.672"></a>
-<span class="sourceLineNo">673</span>   * @param onlyCompleted true, if only successfully completed sessions<a name="line.673"></a>
-<span class="sourceLineNo">674</span>   * @return history info of BackupCompleteData<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * @throws IOException exception<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  public ArrayList&lt;BackupInfo&gt; getBackupHistory(boolean onlyCompleted) throws IOException {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    LOG.trace("get backup history from backup system table");<a name="line.678"></a>
-<span class="sourceLineNo">679</span><a name="line.679"></a>
-<span class="sourceLineNo">680</span>    BackupState state = onlyCompleted ? BackupState.COMPLETE : BackupState.ANY;<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    ArrayList&lt;BackupInfo&gt; list = getBackupInfos(state);<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    return BackupUtils.sortHistoryListDesc(list);<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  }<a name="line.683"></a>
-<span class="sourceLineNo">684</span><a name="line.684"></a>
-<span class="sourceLineNo">685</span>  /**<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * Get all backups history<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @return list of backup info<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException if getting the backup history fails<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public List&lt;BackupInfo&gt; getBackupHistory() throws IOException {<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    return getBackupHistory(false);<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  /**<a name="line.694"></a>
-<span class="sourceLineNo">695</span>   * Get first n backup history records<a name="line.695"></a>
-<span class="sourceLineNo">696</span>   * @param n number of records, if n== -1 - max number is ignored<a name="line.696"></a>
-<span class="sourceLineNo">697</span>   * @return list of records<a name="line.697"></a>
-<span class="sourceLineNo">698</span>   * @throws IOException if getting the backup history fails<a name="line.698"></a>
-<span class="sourceLineNo">699</span>   */<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  public List&lt;BackupInfo&gt; getHistory(int n) throws IOException {<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    List&lt;BackupInfo&gt; history = getBackupHistory();<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    if (n == -1 || history.size() &lt;= n) {<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      return history;<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    return Collections.unmodifiableList(history.subList(0, n));<a name="line.705"></a>
-<span class="sourceLineNo">706</span>  }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>  /**<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   * Get backup history records filtered by list of filters.<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param n max number of records, if n == -1 , then max number is ignored<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param filters list of filters<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return backup records<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException if getting the backup history fails<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  public List&lt;BackupInfo&gt; getBackupHistory(int n, BackupInfo.Filter... filters) throws IOException {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    if (filters.length == 0) {<a name="line.716"></a>
-<span class="sourceLineNo">717</span>      return getHistory(n);<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    }<a name="line.718"></a>
-<span class="sourceLineNo">719</span><a name="line.719"></a>
-<span class="sourceLineNo">720</span>    List&lt;BackupInfo&gt; history = getBackupHistory();<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    List&lt;BackupInfo&gt; result = new ArrayList&lt;&gt;();<a name="line.721"></a>
-<span class="sourceLineNo">722</span>    for (BackupInfo bi : history) {<a name="line.722"></a>
-<span class="sourceLineNo">723</span>      if (n &gt;= 0 &amp;&amp; result.size() == n) {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>        break;<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>      boolean passed = true;<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      for (int i = 0; i &lt; filters.length; i++) {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        if (!filters[i].apply(bi)) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>          passed = false;<a name="line.730"></a>
-<span class="sourceLineNo">731</span>          break;<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        }<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      }<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      if (passed) {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        result.add(bi);<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      }<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span>    return result;<a name="line.738"></a>
-<span class="sourceLineNo">739</span>  }<a name="line.739"></a>
-<span class="sourceLineNo">740</span><a name="line.740"></a>
-<span class="sourceLineNo">741</span>  /*<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   * Retrieve TableName's for completed backup of given type<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * @param type backup type<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   * @return List of table names<a name="line.744"></a>
-<span class="sourceLineNo">745</span>   */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>  public List&lt;TableName&gt; getTablesForBackupType(BackupType type) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    Set&lt;TableName&gt; names = new HashSet&lt;&gt;();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    List&lt;BackupInfo&gt; infos = getBackupHistory(true);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    for (BackupInfo info : infos) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      if (info.getType() == type) {<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        names.addAll(info.getTableNames());<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      }<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    return new ArrayList&lt;&gt;(names);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>  }<a name="line.755"></a>
-<span class="sourceLineNo">756</span><a name="line.756"></a>
-<span class="sourceLineNo">757</span>  /**<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * Get history for backup destination<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   * @param backupRoot backup destination path<a name="line.759"></a>
-<span class="sourceLineNo">760</span>   * @return List of backup info<a name="line.760"></a>
-<span class="sourceLineNo">761</span>   * @throws IOException if getting the backup history fails<a name="line.761"></a>
-<span class="sourceLineNo">762</span>   */<a name="line.762"></a>
-<span class="sourceLineNo">763</span>  public List&lt;BackupInfo&gt; getBackupHistory(String backupRoot) throws IOException {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    ArrayList&lt;BackupInfo&gt; history = getBackupHistory(false);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    for (Iterator&lt;BackupInfo&gt; iterator = history.iterator(); iterator.hasNext();) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      BackupInfo info = iterator.next();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      if (!backupRoot.equals(info.getBackupRootDir())) {<a name="line.767"></a>
-<span class="sourceLineNo">768</span>        iterator.remove();<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      }<a name="line.769"></a>
-<span class="sourceLineNo">770</span>    }<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    return history;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>  }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>  /**<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * Get history for a table<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * @param name table name<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   * @return history for a table<a name="line.777"></a>
-<span class="sourceLineNo">778</span>   * @throws IOException if getting the backup history fails<a name="line.778"></a>
-<span class="sourceLineNo">779</span>   */<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  public List&lt;BackupInfo&gt; getBackupHistoryForTable(TableName name) throws IOException {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    List&lt;BackupInfo&gt; history = getBackupHistory();<a name="line.781"></a>
-<span class="sourceLineNo">782</span>    List&lt;BackupInfo&gt; tableHistory = new ArrayList&lt;&gt;();<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    for (BackupInfo info : history) {<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      List&lt;TableName&gt; tables = info.getTableNames();<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      if (tables.contains(name)) {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        tableHistory.add(info);<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    return tableHistory;<a name="line.789"></a>
-<span class="sourceLineNo">790</span>  }<a name="line.790"></a>
-<span class="sourceLineNo">791</span><a name="line.791"></a>
-<span class="sourceLineNo">792</span>  public Map&lt;TableName, ArrayList&lt;BackupInfo&gt;&gt; getBackupHistoryForTableSet(Set&lt;TableName&gt; set,<a name="line.792"></a>
-<span class="sourceLineNo">793</span>      String backupRoot) throws IOException {<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    List&lt;BackupInfo&gt; history = getBackupHistory(backupRoot);<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    Map&lt;TableName, ArrayList&lt;BackupInfo&gt;&gt; tableHistoryMap = new HashMap&lt;&gt;();<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    for (Iterator&lt;BackupInfo&gt; iterator = history.iterator(); iterator.hasNext();) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      BackupInfo info = iterator.next();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      if (!backupRoot.equals(info.getBackupRootDir())) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>        continue;<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      List&lt;TableName&gt; tables = info.getTableNames();<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      for (TableName tableName : tables) {<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        if (set.contains(tableName)) {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>          ArrayList&lt;BackupInfo&gt; list = tableHistoryMap.get(tableName);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>          if (list == null) {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>            list = new ArrayList&lt;&gt;();<a name="line.806"></a>
-<span class="sourceLineNo">807</span>            tableHistoryMap.put(tableName, list);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>          }<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          list.add(info);<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        }<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    return tableHistoryMap;<a name="line.813"></a>
-<span class="sourceLineNo">814</span>  }<a name="line.814"></a>
-<span class="sourceLineNo">815</span><a name="line.815"></a>
-<span class="sourceLineNo">816</span>  /**<a name="line.816"></a>
-<span class="sourceLineNo">817</span>   * Get all backup sessions with a given state (in descending order by time)<a name="line.817"></a>
-<span class="sourceLineNo">818</span>   * @param state backup session state<a name="line.818"></a>
-<span class="sourceLineNo">819</span>   * @return history info of backup info objects<a name="line.819"></a>
-<span class="sourceLineNo">820</span>   * @throws IOException exception<a name="line.820"></a>
-<span class="sourceLineNo">821</span>   */<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  public ArrayList&lt;BackupInfo&gt; getBackupInfos(BackupState state) throws IOException {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>    LOG.trace("get backup infos from backup system table");<a name="line.823"></a>
-<span class="sourceLineNo">824</span><a name="line.824"></a>
-<span class="sourceLineNo">825</span>    Scan scan = createScanForBackupHistory();<a name="line.825"></a>
-<span class="sourceLineNo">826</span>    ArrayList&lt;BackupInfo&gt; list = new ArrayList&lt;&gt;();<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>    try (Table table = connection.getTable(tableName);<a name="line.828"></a>
-<span class="sourceLineNo">829</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      Result res;<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      while ((res = scanner.next()) != null) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>        res.advance();<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        BackupInfo context = cellToBackupInfo(res.current());<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        if (state != BackupState.ANY &amp;&amp; context.getState() != state) {<a name="line.834"></a>
-<span class="sourceLineNo">835</span>          continue;<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        }<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        list.add(context);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      }<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      return list;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
-<span class="sourceLineNo">844</span>   * Write the current timestamps for each regionserver to backup system table after a successful<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * full or incremental backup. The saved timestamp is of the last log file that was backed up<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * already.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   * @param tables tables<a name="line.847"></a>
-<span class="sourceLineNo">848</span>   * @param newTimestamps timestamps<a name="line.848"></a>
-<span class="sourceLineNo">849</span>   * @param backupRoot root directory path to backup<a name="line.849"></a>
-<span class="sourceLineNo">850</span>   * @throws IOException exception<a name="line.850"></a>
-<span class="sourceLineNo">851</span>   */<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  public void writeRegionServerLogTimestamp(Set&lt;TableName&gt; tables,<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      HashMap&lt;String, Long&gt; newTimestamps, String backupRoot) throws IOException {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    if (LOG.isTraceEnabled()) {<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      LOG.trace("write RS log time stamps to backup system table for tables ["<a name="line.855"></a>
-<span class="sourceLineNo">856</span>          + StringUtils.join(tables, ",") + "]");<a name="line.856"></a>
-<span class="sourceLineNo">857</span>    }<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    List&lt;Put&gt; puts = new ArrayList&lt;&gt;();<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    for (TableName table : tables) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      byte[] smapData = toTableServerTimestampProto(table, newTimestamps).toByteArray();<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      Put put = createPutForWriteRegionServerLogTimestamp(table, smapData, backupRoot);<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      puts.add(put);<a name="line.862"></a>
-<span class="sourceLineNo">863</span>    }<a name="line.863"></a>
-<span class="sourceLineNo">864</span>    try (Table table = connection.getTable(tableName)) {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      table.put(puts);<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    }<a name="line.866"></a>
-<span class="sourceLineNo">867</span>  }<a name="line.867"></a>
-<span class="sourceLineNo">868</span><a name="line.868"></a>
-<span class="sourceLineNo">869</span>  /**<a name="line.869"></a>
-<span class="sourceLineNo">870</span>   * Read the timestamp for each region server log after the last successful backup. Each table has<a name="line.870"></a>
-<span class="sourceLineNo">871</span>   * its own set of the timestamps. The info is stored for each table as a concatenated string of<a name="line.871"></a>
-<span class="sourceLineNo">872</span>   * rs-&gt;timestapmp<a name="line.872"></a>
-<span class="sourceLineNo">873</span>   * @param backupRoot root directory path to backup<a name="line.873"></a>
-<span class="sourceLineNo">874</span>   * @return the timestamp for each region server. key: tableName value:<a name="line.874"></a>
-<span class="sourceLineNo">875</span>   *         RegionServer,PreviousTimeStamp<a name="line.875"></a>
-<span class="sourceLineNo">876</span>   * @throws IOException exception<a name="line.876"></a>
-<span class="sourceLineNo">877</span>   */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>  public HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; readLogTimestampMap(String backupRoot)<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      throws IOException {<a name="line.879"></a>
-<span class="sourceLineNo">880</span>    if (LOG.isTraceEnabled()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>      LOG.trace("read RS log ts from backup system table for root=" + backupRoot);<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    }<a name="line.882"></a>
-<span class="sourceLineNo">883</span><a name="line.883"></a>
-<span class="sourceLineNo">884</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; tableTimestampMap = new HashMap&lt;&gt;();<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>    Scan scan = createScanForReadLogTimestampMap(backupRoot);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>    try (Table table = connection.getTable(tableName);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        ResultScanner scanner = table.getScanner(scan)) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>      Result res;<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      while ((res = scanner.next()) != null) {<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        res.advance();<a name="line.891"></a>
-<span class="sourceLineNo">892</span>        Cell cell = res.current();<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        byte[] row = CellUtil.cloneRow(cell);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>        String tabName = getTableNameForReadLogTimestampMap(row);<a name="line.894"></a>
-<span class="sourceLineNo">895</span>        TableName tn = TableName.valueOf(tabName);<a name="line.895"></a>
-<span class="sourceLineNo">896</span>        byte[] data = CellUtil.cloneValue(cell);<a name="line.896"></a>
-<span class="sourceLineNo">897</span>        if (data == null) {<a name="line.897"></a>
-<span class="sourceLineNo">898</span>          throw new IOException("Data of last backup data from backup system table "<a name="line.898"></a>
-<span class="sourceLineNo">899</span>              + "is empty. Create a backup first.");<a name="line.899"></a>
-<span class="sourceLineNo">900</span>        }<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (data != null &amp;&amp; data.length &gt; 0) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          HashMap&lt;String, Long&gt; lastBackup =<a name="line.902"></a>
-<span class="sourceLineNo">903</span>              fromTableServerTimestampProto(BackupProtos.TableServerTimestamp.parseFrom(data));<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          tableTimestampMap.put(tn, lastBackup);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        }<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      }<a name="line.906"></a>
-<span class="sourceLineNo">907</span>      return tableTimestampMap;<a name="line.907"></a>
-<span class="sourceLineNo">908</span>    }<a name="line.908"></a>
-<span class="sourceLineNo">909</span>  }<a name="line.909"></a>
-<span class="sourceLineNo">910</span><a name="line.910"></a>
-<span class="sourceLineNo">911</span>  private BackupProtos.TableServerTimestamp toTableServerTimestampProto(TableName table,<a name="line.911"></a>
-<span class="sourceLineNo">912</span>      Map&lt;String, Long&gt; map) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    BackupProtos.TableServerTimestamp.Builder tstBuilder =<a name="line.913"></a>
-<span class="sourceLineNo">914</span>        BackupProtos.TableServerTimestamp.newBuilder();<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    tstBuilder<a name="line.915"></a>
-<span class="sourceLineNo">916</span>    .setTableName(org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil.toProtoTableName(table));<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>    for (Entry&lt;String, Long&gt; entry : map.entrySet()) {<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      BackupProtos.ServerTimestamp.Builder builder = BackupProtos.ServerTimestamp.newBuilder();<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      HBaseProtos.ServerName.Builder snBuilder = HBaseProtos.ServerName.newBuilder();<a name="line.920"></a>
-<span class="sourceLineNo">921</span>      ServerName sn = ServerName.parseServerName(entry.getKey());<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      snBuilder.setHostName(sn.getHostname());<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      snBuilder.setPort(sn.getPort());<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      builder.setServerName(snBuilder.build());<a name="line.924"></a>
-<span class="sourceLineNo">925</span>      builder.setTimestamp(entry.getValue());<a name="line.925"></a>
-<span class="sourceLineNo">926</span>      tstBuilder.addServerTimestamp(builder.build());<a name="line.926"></a>
-<span class="sourceLineNo">927</span>    }<a name="line.927"></a>
-<span class="sourceLineNo">928</span><a name="line.928"></a>
-<span class="sourceLineNo">929</span>    return tstBuilder.build();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>  }<a name="line.930"></a>
-<span class="sourceLineNo">931</span><a name="line.931"></a>
-<span class="sourceLineNo">932</span>  private HashMap&lt;String, Long&gt;<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    fromTableServerTimestampProto(BackupProtos.TableServerTimestamp proto) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span><a name="line.934"></a>
-<span class="sourceLineNo">935</span>    HashMap&lt;String, Long&gt; map = new HashMap&lt;&gt;();<a name="line.935"></a>
-<span class="sourceLineNo">936</span>    List&lt;BackupProtos.ServerTimestamp&gt; list = proto.getServerTimestampList();<a name="line.936"></a>
-<span class="sourceLineNo">937</span>    for (BackupProtos.ServerTimestamp st : list) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>      ServerName sn =<a name="line.938"></a>
-<span class="sourceLineNo">939</span>          org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil.toServerName(st.getServerName());<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      map.put(sn.getHostname() + ":" + sn.getPort(), st.getTimestamp());<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return map;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Return the current tables covered by incremental backup.<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * @param backupRoot root directory path to backup<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * @return set of tableNames<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * @throws IOException exception<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   */<a name="line.950"></a>
-<span class="sourceLineNo">951</span>  public Set&lt;TableName&gt; getIncrementalBackupTableSet(String backupRoot) throws IOException {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    LOG.trace("get incremental backup table set from backup system table");<a name="line.952"></a>
-<span class="sourceLineNo">953</span><a name="line.953"></a>
-<span class="sourceLineNo">954</span>    TreeSet&lt;TableName&gt; set = new TreeSet&lt;&gt;();<a name="line.954"></a>
-<span class="sourceLineNo">955</span><a name="line.955"></a>
-<span class="sourceLineNo">956</span>    try (Table table = connection.getTable(tableName)) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      Get get = createGetForIncrBackupTableSet(backupRoot);<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      Result res = table.get(get);<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      if (res.isEmpty()) {<a name="line.959"></a>
-<span class="sourceLineNo">960</span>        return set;<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      }<a name="line.961"></a>
-<span class="sourceLineNo">962</span>      List&lt;Cell&gt; cells = res.listCells();<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      for (Cell cell : cells) {<a name="line.963"></a>
-<span class="sourceLineNo">964</span>        // qualifier = table na

<TRUNCATED>

[43/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
index 2f0eda0..10fd671 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/HFileBlock.html" target="_top">Frames</a></li>
@@ -186,40 +186,44 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <th class="colLast" scope="col">Class and Description</th>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></span></code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static interface&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a></span></code>
 <div class="block">Iterator for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock</code></a>s.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static interface&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a></span></code>
 <div class="block">Something that can be written into a block.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static interface&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></span></code>
 <div class="block">An HFile block reader with iteration ability.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReaderImpl</a></span></code>
 <div class="block">Reads version 2 HFile blocks from the filesystem.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Header</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a></span></code>
 <div class="block">Data-structure to use caching the header of the NEXT block.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></span></code>
 <div class="block">Unified version 2 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> block writer.</div>
@@ -248,7 +252,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <th class="colLast" scope="col">Field and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>(package private) static <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#BLOCK_DESERIALIZER">BLOCK_DESERIALIZER</a></span></code>
 <div class="block">Used deserializing blocks from Cache.</div>
 </td>
@@ -968,7 +972,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>BLOCK_DESERIALIZER</h4>
-<pre>static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.256">BLOCK_DESERIALIZER</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.256">BLOCK_DESERIALIZER</a></pre>
 <div class="block">Used deserializing blocks from Cache.
 
  <code>
@@ -982,7 +986,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
  </code></div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
-<dd><code>#serialize(ByteBuffer)</code></dd>
+<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#serialize-java.nio.ByteBuffer-boolean-"><code>serialize(ByteBuffer, boolean)</code></a></dd>
 </dl>
 </li>
 </ul>
@@ -992,7 +996,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DESERIALIZER_IDENTIFIER</h4>
-<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.296">DESERIALIZER_IDENTIFIER</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.300">DESERIALIZER_IDENTIFIER</a></pre>
 </li>
 </ul>
 </li>
@@ -1009,7 +1013,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.305">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;that)</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.309">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;that)</pre>
 <div class="block">Copy constructor. Creates a shallow copy of <code>that</code>'s buffer.</div>
 </li>
 </ul>
@@ -1019,7 +1023,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.313">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;that,
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.317">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;that,
                    boolean&nbsp;bufCopy)</pre>
 <div class="block">Copy constructor. Creates a shallow/deep copy of <code>that</code>'s buffer as per the boolean
  param.</div>
@@ -1031,7 +1035,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.344">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.348">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
                   int&nbsp;onDiskSizeWithoutHeader,
                   int&nbsp;uncompressedSizeWithoutHeader,
                   long&nbsp;prevBlockOffset,
@@ -1068,7 +1072,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.364">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.368">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
            boolean&nbsp;usesHBaseChecksum,
            <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType,
            long&nbsp;offset,
@@ -1101,7 +1105,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>init</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.404">init</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.408">init</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
                   int&nbsp;onDiskSizeWithoutHeader,
                   int&nbsp;uncompressedSizeWithoutHeader,
                   long&nbsp;prevBlockOffset,
@@ -1118,7 +1122,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithHeader</h4>
-<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.424">getOnDiskSizeWithHeader</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;headerBuf,
+<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.428">getOnDiskSizeWithHeader</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;headerBuf,
                                            boolean&nbsp;verifyChecksum)</pre>
 <div class="block">Parse total on disk size including header and checksum.</div>
 <dl>
@@ -1136,7 +1140,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getNextBlockOnDiskSize</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.435">getNextBlockOnDiskSize</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.439">getNextBlockOnDiskSize</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the on-disk size of the next block (including the header size and any checksums if
@@ -1151,7 +1155,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockType</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.440">getBlockType</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.444">getBlockType</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#getBlockType--">getBlockType</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a></code></dd>
@@ -1166,7 +1170,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockEncodingId</h4>
-<pre>short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.445">getDataBlockEncodingId</a>()</pre>
+<pre>short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.449">getDataBlockEncodingId</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>get data block encoding id that was used to encode this block</dd>
@@ -1179,7 +1183,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithHeader</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.456">getOnDiskSizeWithHeader</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.460">getOnDiskSizeWithHeader</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the on-disk size of header + data part + checksum.</dd>
@@ -1192,7 +1196,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithoutHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.463">getOnDiskSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.467">getOnDiskSizeWithoutHeader</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the on-disk size of the data part + checksum (header excluded).</dd>
@@ -1205,7 +1209,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedSizeWithoutHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.470">getUncompressedSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.474">getUncompressedSizeWithoutHeader</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the uncompressed size of data part (header and checksum excluded).</dd>
@@ -1218,7 +1222,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getPrevBlockOffset</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.478">getPrevBlockOffset</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.482">getPrevBlockOffset</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the offset of the previous block of the same type in the file, or
@@ -1232,7 +1236,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>overwriteHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.486">overwriteHeader</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.490">overwriteHeader</a>()</pre>
 <div class="block">Rewinds <code>buf</code> and writes first 4 header fields. <code>buf</code> position
  is modified as side-effect.</div>
 </li>
@@ -1243,7 +1247,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBufferWithoutHeader</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.504">getBufferWithoutHeader</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.508">getBufferWithoutHeader</a>()</pre>
 <div class="block">Returns a buffer that does not include the header or checksum.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1257,7 +1261,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBufferReadOnly</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.520">getBufferReadOnly</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.524">getBufferReadOnly</a>()</pre>
 <div class="block">Returns a read-only duplicate of the buffer this block stores internally ready to be read.
  Clients must not modify the buffer object though they may set position and limit on the
  returned buffer since we pass back a duplicate. This method has to be public because it is used
@@ -1276,7 +1280,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckAssertion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.528">sanityCheckAssertion</a>(long&nbsp;valueFromBuf,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.532">sanityCheckAssertion</a>(long&nbsp;valueFromBuf,
                                   long&nbsp;valueFromField,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;fieldName)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1292,7 +1296,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckAssertion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.537">sanityCheckAssertion</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromBuf,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.541">sanityCheckAssertion</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromBuf,
                                   <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromField)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1307,7 +1311,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheck</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.554">sanityCheck</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.558">sanityCheck</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks if the block is internally consistent, i.e. the first
  <a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#HFILEBLOCK_HEADER_SIZE"><code>HConstants.HFILEBLOCK_HEADER_SIZE</code></a> bytes of the buffer contain a
@@ -1327,7 +1331,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.588">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.592">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -1340,7 +1344,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>unpack</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.633">unpack</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.637">unpack</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext,
                   <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;reader)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Retrieves the decompressed/decrypted view of this block. An encoded block remains in its
@@ -1357,7 +1361,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>allocateBuffer</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.661">allocateBuffer</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.665">allocateBuffer</a>()</pre>
 <div class="block">Always allocates a new buffer of the correct size. Copies header bytes
  from the existing buffer. Does not change header fields.
  Reserve room to keep checksum bytes too.</div>
@@ -1369,7 +1373,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>isUnpacked</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.683">isUnpacked</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.687">isUnpacked</a>()</pre>
 <div class="block">Return true when this block's buffer has been unpacked, false otherwise. Note this is a
  calculated heuristic, not tracked attribute of the block.</div>
 </li>
@@ -1380,7 +1384,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckUncompressedSize</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.693">sanityCheckUncompressedSize</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.697">sanityCheckUncompressedSize</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">An additional sanity-check in case no compression or encryption is being used.</div>
 <dl>
@@ -1395,7 +1399,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOffset</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.707">getOffset</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.711">getOffset</a>()</pre>
 <div class="block">Cannot be <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#UNSET"><code>UNSET</code></a>. Must be a legitimate value. Used re-making the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>BlockCacheKey</code></a> when
  block is returned to the cache.</div>
 <dl>
@@ -1410,7 +1414,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getByteStream</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.717">getByteStream</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.721">getByteStream</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>a byte stream reading the data + checksum of this block</dd>
@@ -1423,7 +1427,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.724">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.728">heapSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize--">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -1439,7 +1443,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>readWithExtra</h4>
-<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.760">readWithExtra</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in,
+<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.764">readWithExtra</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in,
                              byte[]&nbsp;buf,
                              int&nbsp;bufOffset,
                              int&nbsp;necessaryLen,
@@ -1469,7 +1473,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>positionalReadWithExtra</h4>
-<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.801">positionalReadWithExtra</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;in,
+<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.805">positionalReadWithExtra</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;in,
                                        long&nbsp;position,
                                        byte[]&nbsp;buf,
                                        int&nbsp;bufOffset,
@@ -1504,7 +1508,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckUncompressed</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1876">sanityCheckUncompressed</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1880">sanityCheckUncompressed</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">An additional sanity-check in case no compression or encryption is being used.</div>
 <dl>
@@ -1519,7 +1523,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getSerializedLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1888">getSerializedLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1892">getSerializedLength</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#getSerializedLength--">Cacheable</a></code></span></div>
 <div class="block">Returns the length of the ByteBuffer required to serialized the object. If the
  object cannot be serialized, it should return 0.</div>
@@ -1537,7 +1541,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>serialize</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1898">serialize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1902">serialize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination,
                       boolean&nbsp;includeNextBlockMetadata)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#serialize-java.nio.ByteBuffer-boolean-">Cacheable</a></code></span></div>
 <div class="block">Serializes its data into destination.</div>
@@ -1556,7 +1560,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaData</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1911">getMetaData</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1915">getMetaData</a>()</pre>
 <div class="block">For use by bucketcache. This exposes internals.</div>
 </li>
 </ul>
@@ -1566,7 +1570,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>addMetaData</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1922">addMetaData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1926">addMetaData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination,
                                boolean&nbsp;includeNextBlockMetadata)</pre>
 <div class="block">Adds metadata at current position (position is moved forward). Does not flip or reset.</div>
 <dl>
@@ -1581,7 +1585,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getDeserializer</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1933">getDeserializer</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1937">getDeserializer</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#getDeserializer--">Cacheable</a></code></span></div>
 <div class="block">Returns CacheableDeserializer instance which reconstructs original object from ByteBuffer.</div>
 <dl>
@@ -1598,7 +1602,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1938">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1942">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -1611,7 +1615,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1951">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;comparison)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1955">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;comparison)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -1624,7 +1628,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockEncoding</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1990">getDataBlockEncoding</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1994">getDataBlockEncoding</a>()</pre>
 </li>
 </ul>
 <a name="getChecksumType--">
@@ -1633,7 +1637,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getChecksumType</h4>
-<pre>byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1998">getChecksumType</a>()</pre>
+<pre>byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2002">getChecksumType</a>()</pre>
 </li>
 </ul>
 <a name="getBytesPerChecksum--">
@@ -1642,7 +1646,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBytesPerChecksum</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2002">getBytesPerChecksum</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2006">getBytesPerChecksum</a>()</pre>
 </li>
 </ul>
 <a name="getOnDiskDataSizeWithHeader--">
@@ -1651,7 +1655,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskDataSizeWithHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2008">getOnDiskDataSizeWithHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2012">getOnDiskDataSizeWithHeader</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the size of data on disk + header. Excludes checksum.</dd>
@@ -1664,7 +1668,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>totalChecksumBytes</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2016">totalChecksumBytes</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2020">totalChecksumBytes</a>()</pre>
 <div class="block">Calculate the number of bytes required to store all the checksums
  for this block. Each checksum value is a 4 byte integer.</div>
 </li>
@@ -1675,7 +1679,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>headerSize</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2031">headerSize</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2035">headerSize</a>()</pre>
 <div class="block">Returns the size of this block header.</div>
 </li>
 </ul>
@@ -1685,7 +1689,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>headerSize</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2038">headerSize</a>(boolean&nbsp;usesHBaseChecksum)</pre>
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2042">headerSize</a>(boolean&nbsp;usesHBaseChecksum)</pre>
 <div class="block">Maps a minor version to the size of the header.</div>
 </li>
 </ul>
@@ -1695,7 +1699,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getDummyHeaderForVersion</h4>
-<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2048">getDummyHeaderForVersion</a>()</pre>
+<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2052">getDummyHeaderForVersion</a>()</pre>
 <div class="block">Return the appropriate DUMMY_HEADER for the minor version</div>
 </li>
 </ul>
@@ -1705,7 +1709,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getDummyHeaderForVersion</h4>
-<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2055">getDummyHeaderForVersion</a>(boolean&nbsp;usesHBaseChecksum)</pre>
+<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2059">getDummyHeaderForVersion</a>(boolean&nbsp;usesHBaseChecksum)</pre>
 <div class="block">Return the appropriate DUMMY_HEADER for the minor version</div>
 </li>
 </ul>
@@ -1715,7 +1719,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getHFileContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2063">getHFileContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2067">getHFileContext</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>This HFileBlocks fileContext which will a derivative of the
@@ -1729,7 +1733,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getMemoryType</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2068">getMemoryType</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2072">getMemoryType</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#getMemoryType--">getMemoryType</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a></code></dd>
@@ -1744,7 +1748,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>usesSharedMemory</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2075">usesSharedMemory</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2079">usesSharedMemory</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if this block is backed by a shared memory area(such as that of a BucketCache).</dd>
@@ -1757,7 +1761,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>toStringHeader</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2085">toStringHeader</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf)
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2089">toStringHeader</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Convert the contents of the block header into a human readable string.
  This is mostly helpful for debugging. This assumes that the block
@@ -1774,7 +1778,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>deepClone</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2107">deepClone</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.2111">deepClone</a>()</pre>
 </li>
 </ul>
 </li>
@@ -1806,7 +1810,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/HFileBlock.html" target="_top">Frames</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
index 933760d..6bce66c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1286">BucketCache.BucketEntry</a>
+<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1304">BucketCache.BucketEntry</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a></pre>
 <div class="block">Item in cache. We expect this to be where most memory goes. Java uses 8
@@ -164,7 +164,9 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>(package private) byte</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserialiserIndex">deserialiserIndex</a></span></code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserialiserIndex">deserialiserIndex</a></span></code>
+<div class="block">The index of the deserializer that can deserialize this BucketEntry content.</div>
+</td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>private int</code></td>
@@ -233,46 +235,49 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 </tr>
 <tr id="i2" class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference--">deserializerReference</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i3" class="rowColor">
 <td class="colFirst"><code>long</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getCachedTime--">getCachedTime</a></span>()</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getAccessCounter--">getAccessCounter</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i4" class="altColor">
+<td class="colFirst"><code>long</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getCachedTime--">getCachedTime</a></span>()</code>&nbsp;</td>
+</tr>
+<tr id="i5" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getLength--">getLength</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i5" class="rowColor">
+<tr id="i6" class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getPriority--">getPriority</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i6" class="altColor">
+<tr id="i7" class="rowColor">
 <td class="colFirst"><code>protected int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getRefCount--">getRefCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i7" class="rowColor">
+<tr id="i8" class="altColor">
 <td class="colFirst"><code>protected int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#incrementRefCountAndGet--">incrementRefCountAndGet</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i8" class="altColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>protected boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#isMarkedForEvict--">isMarkedForEvict</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#markForEvict--">markForEvict</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>(package private) long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#offset--">offset</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>protected void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
-                        <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">setDeserialiserReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer)</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setOffset-long-">setOffset</a></span>(long&nbsp;value)</code>&nbsp;</td>
 </tr>
@@ -304,7 +309,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1287">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1305">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry.serialVersionUID">Constant Field Values</a></dd>
@@ -317,7 +322,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPARATOR</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1290">COMPARATOR</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1308">COMPARATOR</a></pre>
 </li>
 </ul>
 <a name="offsetBase">
@@ -326,7 +331,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>offsetBase</h4>
-<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1298">offsetBase</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1311">offsetBase</a></pre>
 </li>
 </ul>
 <a name="length">
@@ -335,7 +340,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>length</h4>
-<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1299">length</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1312">length</a></pre>
 </li>
 </ul>
 <a name="offset1">
@@ -344,7 +349,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>offset1</h4>
-<pre>private&nbsp;byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1300">offset1</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1313">offset1</a></pre>
 </li>
 </ul>
 <a name="deserialiserIndex">
@@ -353,7 +358,9 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>deserialiserIndex</h4>
-<pre>byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1301">deserialiserIndex</a></pre>
+<pre>byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1319">deserialiserIndex</a></pre>
+<div class="block">The index of the deserializer that can deserialize this BucketEntry content.
+ See <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializerIdManager</code></a> for hosting of index to serializers.</div>
 </li>
 </ul>
 <a name="accessCounter">
@@ -362,7 +369,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>accessCounter</h4>
-<pre>private volatile&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1302">accessCounter</a></pre>
+<pre>private volatile&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1321">accessCounter</a></pre>
 </li>
 </ul>
 <a name="priority">
@@ -371,7 +378,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>priority</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1303">priority</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1322">priority</a></pre>
 </li>
 </ul>
 <a name="cachedTime">
@@ -380,7 +387,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockListLast">
 <li class="blockList">
 <h4>cachedTime</h4>
-<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1308">cachedTime</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1327">cachedTime</a></pre>
 <div class="block">Time this block was cached.  Presumes we are created just before we are added to the cache.</div>
 </li>
 </ul>
@@ -398,7 +405,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketEntry</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1310">BucketEntry</a>(long&nbsp;offset,
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1329">BucketEntry</a>(long&nbsp;offset,
             int&nbsp;length,
             long&nbsp;accessCounter,
             boolean&nbsp;inMemory)</pre>
@@ -418,7 +425,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>offset</h4>
-<pre>long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1321">offset</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1340">offset</a>()</pre>
 </li>
 </ul>
 <a name="setOffset-long-">
@@ -427,7 +434,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>setOffset</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1327">setOffset</a>(long&nbsp;value)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1346">setOffset</a>(long&nbsp;value)</pre>
 </li>
 </ul>
 <a name="getLength--">
@@ -436,26 +443,34 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>getLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1334">getLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1353">getLength</a>()</pre>
 </li>
 </ul>
-<a name="deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">
+<a name="deserializerReference--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>deserializerReference</h4>
-<pre>protected&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1338">deserializerReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</pre>
+<pre>protected&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1357">deserializerReference</a>()</pre>
 </li>
 </ul>
-<a name="setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">
+<a name="setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>setDeserialiserReference</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1344">setDeserialiserReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
-                                        <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1361">setDeserialiserReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer)</pre>
+</li>
+</ul>
+<a name="getAccessCounter--">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getAccessCounter</h4>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1365">getAccessCounter</a>()</pre>
 </li>
 </ul>
 <a name="access-long-">
@@ -464,7 +479,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>access</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1354">access</a>(long&nbsp;accessCounter)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1372">access</a>(long&nbsp;accessCounter)</pre>
 <div class="block">Block has been accessed. Update its local access counter.</div>
 </li>
 </ul>
@@ -474,7 +489,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>getPriority</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1361">getPriority</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1379">getPriority</a>()</pre>
 </li>
 </ul>
 <a name="getCachedTime--">
@@ -483,7 +498,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>getCachedTime</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1365">getCachedTime</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1383">getCachedTime</a>()</pre>
 </li>
 </ul>
 <a name="getRefCount--">
@@ -492,7 +507,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>getRefCount</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1369">getRefCount</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1387">getRefCount</a>()</pre>
 </li>
 </ul>
 <a name="incrementRefCountAndGet--">
@@ -501,7 +516,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>incrementRefCountAndGet</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1373">incrementRefCountAndGet</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1391">incrementRefCountAndGet</a>()</pre>
 </li>
 </ul>
 <a name="decrementRefCountAndGet--">
@@ -510,7 +525,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>decrementRefCountAndGet</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1377">decrementRefCountAndGet</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1395">decrementRefCountAndGet</a>()</pre>
 </li>
 </ul>
 <a name="isMarkedForEvict--">
@@ -519,7 +534,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockList">
 <li class="blockList">
 <h4>isMarkedForEvict</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1381">isMarkedForEvict</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1399">isMarkedForEvict</a>()</pre>
 </li>
 </ul>
 <a name="markForEvict--">
@@ -528,7 +543,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializab
 <ul class="blockListLast">
 <li class="blockList">
 <h4>markForEvict</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1385">markForEvict</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1403">markForEvict</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
index 7122679..3722981 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1433">BucketCache.BucketEntryGroup</a>
+<pre>private class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1451">BucketCache.BucketEntryGroup</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Used to group bucket entries into priority buckets. There will be a
  BucketEntryGroup for each priority (single, multi, memory). Once bucketed,
@@ -227,7 +227,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>queue</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CachedEntryQueue</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1435">queue</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CachedEntryQueue</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1453">queue</a></pre>
 </li>
 </ul>
 <a name="totalSize">
@@ -236,7 +236,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>totalSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1436">totalSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1454">totalSize</a></pre>
 </li>
 </ul>
 <a name="bucketSize">
@@ -245,7 +245,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>bucketSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1437">bucketSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1455">bucketSize</a></pre>
 </li>
 </ul>
 </li>
@@ -262,7 +262,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketEntryGroup</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1439">BucketEntryGroup</a>(long&nbsp;bytesToFree,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1457">BucketEntryGroup</a>(long&nbsp;bytesToFree,
                         long&nbsp;blockSize,
                         long&nbsp;bucketSize)</pre>
 </li>
@@ -281,7 +281,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1445">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;block)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1463">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;block)</pre>
 </li>
 </ul>
 <a name="free-long-">
@@ -290,7 +290,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>free</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1450">free</a>(long&nbsp;toFree)</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1468">free</a>(long&nbsp;toFree)</pre>
 </li>
 </ul>
 <a name="overflow--">
@@ -299,7 +299,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>overflow</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1466">overflow</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1484">overflow</a>()</pre>
 </li>
 </ul>
 <a name="totalSize--">
@@ -308,7 +308,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>totalSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1470">totalSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1488">totalSize</a>()</pre>
 </li>
 </ul>
 </li>


[10/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
index 69db023..59daaeb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
@@ -196,7 +196,7 @@
 <span class="sourceLineNo">188</span>      throw new UnknownRegionException("No RegionState found for " + ri.getEncodedName());<a name="line.188"></a>
 <span class="sourceLineNo">189</span>    }<a name="line.189"></a>
 <span class="sourceLineNo">190</span>    if (!rs.isOpened()) {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      throw new DoNotRetryRegionException(ri.getEncodedName() + " is not OPEN");<a name="line.191"></a>
+<span class="sourceLineNo">191</span>      throw new DoNotRetryRegionException(ri.getEncodedName() + " is not OPEN; regionState=" + rs);<a name="line.191"></a>
 <span class="sourceLineNo">192</span>    }<a name="line.192"></a>
 <span class="sourceLineNo">193</span>    if (ri.isSplitParent()) {<a name="line.193"></a>
 <span class="sourceLineNo">194</span>      throw new DoNotRetryRegionException(ri.getEncodedName() +<a name="line.194"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
index 32d662d..e5a5866 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
@@ -102,7 +102,7 @@
 <span class="sourceLineNo">094</span>          }<a name="line.94"></a>
 <span class="sourceLineNo">095</span><a name="line.95"></a>
 <span class="sourceLineNo">096</span>          // TODO: Move out... in the acquireLock()<a name="line.96"></a>
-<span class="sourceLineNo">097</span>          LOG.debug("Waiting for '" + getTableName() + "' regions in transition");<a name="line.97"></a>
+<span class="sourceLineNo">097</span>          LOG.debug("Waiting for RIT for {}", this);<a name="line.97"></a>
 <span class="sourceLineNo">098</span>          regions = env.getAssignmentManager().getRegionStates().getRegionsOfTable(getTableName());<a name="line.98"></a>
 <span class="sourceLineNo">099</span>          assert regions != null &amp;&amp; !regions.isEmpty() : "unexpected 0 regions";<a name="line.99"></a>
 <span class="sourceLineNo">100</span>          ProcedureSyncWait.waitRegionInTransition(env, regions);<a name="line.100"></a>
@@ -113,29 +113,29 @@
 <span class="sourceLineNo">105</span>          setNextState(DeleteTableState.DELETE_TABLE_REMOVE_FROM_META);<a name="line.105"></a>
 <span class="sourceLineNo">106</span>          break;<a name="line.106"></a>
 <span class="sourceLineNo">107</span>        case DELETE_TABLE_REMOVE_FROM_META:<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          LOG.debug("delete '" + getTableName() + "' regions from META");<a name="line.108"></a>
+<span class="sourceLineNo">108</span>          LOG.debug("Deleting regions from META for {}", this);<a name="line.108"></a>
 <span class="sourceLineNo">109</span>          DeleteTableProcedure.deleteFromMeta(env, getTableName(), regions);<a name="line.109"></a>
 <span class="sourceLineNo">110</span>          setNextState(DeleteTableState.DELETE_TABLE_CLEAR_FS_LAYOUT);<a name="line.110"></a>
 <span class="sourceLineNo">111</span>          break;<a name="line.111"></a>
 <span class="sourceLineNo">112</span>        case DELETE_TABLE_CLEAR_FS_LAYOUT:<a name="line.112"></a>
-<span class="sourceLineNo">113</span>          LOG.debug("delete '" + getTableName() + "' from filesystem");<a name="line.113"></a>
+<span class="sourceLineNo">113</span>          LOG.debug("Deleting regions from filesystem for {}", this);<a name="line.113"></a>
 <span class="sourceLineNo">114</span>          DeleteTableProcedure.deleteFromFs(env, getTableName(), regions, true);<a name="line.114"></a>
 <span class="sourceLineNo">115</span>          setNextState(DeleteTableState.DELETE_TABLE_UPDATE_DESC_CACHE);<a name="line.115"></a>
 <span class="sourceLineNo">116</span>          regions = null;<a name="line.116"></a>
 <span class="sourceLineNo">117</span>          break;<a name="line.117"></a>
 <span class="sourceLineNo">118</span>        case DELETE_TABLE_UPDATE_DESC_CACHE:<a name="line.118"></a>
-<span class="sourceLineNo">119</span>          LOG.debug("delete '" + getTableName() + "' descriptor");<a name="line.119"></a>
+<span class="sourceLineNo">119</span>          LOG.debug("Deleting descriptor for {}", this);<a name="line.119"></a>
 <span class="sourceLineNo">120</span>          DeleteTableProcedure.deleteTableDescriptorCache(env, getTableName());<a name="line.120"></a>
 <span class="sourceLineNo">121</span>          setNextState(DeleteTableState.DELETE_TABLE_UNASSIGN_REGIONS);<a name="line.121"></a>
 <span class="sourceLineNo">122</span>          break;<a name="line.122"></a>
 <span class="sourceLineNo">123</span>        case DELETE_TABLE_UNASSIGN_REGIONS:<a name="line.123"></a>
-<span class="sourceLineNo">124</span>          LOG.debug("delete '" + getTableName() + "' assignment state");<a name="line.124"></a>
+<span class="sourceLineNo">124</span>          LOG.debug("Deleting assignment state for {}", this);<a name="line.124"></a>
 <span class="sourceLineNo">125</span>          DeleteTableProcedure.deleteAssignmentState(env, getTableName());<a name="line.125"></a>
 <span class="sourceLineNo">126</span>          setNextState(DeleteTableState.DELETE_TABLE_POST_OPERATION);<a name="line.126"></a>
 <span class="sourceLineNo">127</span>          break;<a name="line.127"></a>
 <span class="sourceLineNo">128</span>        case DELETE_TABLE_POST_OPERATION:<a name="line.128"></a>
 <span class="sourceLineNo">129</span>          postDelete(env);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          LOG.debug("delete '" + getTableName() + "' completed");<a name="line.130"></a>
+<span class="sourceLineNo">130</span>          LOG.debug("Finished {}", this);<a name="line.130"></a>
 <span class="sourceLineNo">131</span>          return Flow.NO_MORE_STATE;<a name="line.131"></a>
 <span class="sourceLineNo">132</span>        default:<a name="line.132"></a>
 <span class="sourceLineNo">133</span>          throw new UnsupportedOperationException("unhandled state=" + state);<a name="line.133"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
index 2f83467..3e6a53e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
@@ -150,7 +150,7 @@
 <span class="sourceLineNo">142</span>      if (isRollbackSupported(state)) {<a name="line.142"></a>
 <span class="sourceLineNo">143</span>        setFailure("master-disable-table", e);<a name="line.143"></a>
 <span class="sourceLineNo">144</span>      } else {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>        LOG.warn("Retriable error trying to disable table={} (in state={})", tableName, state, e);<a name="line.145"></a>
+<span class="sourceLineNo">145</span>        LOG.warn("Retryable error in {}", this, e);<a name="line.145"></a>
 <span class="sourceLineNo">146</span>      }<a name="line.146"></a>
 <span class="sourceLineNo">147</span>    }<a name="line.147"></a>
 <span class="sourceLineNo">148</span>    return Flow.HAS_MORE_STATE;<a name="line.148"></a>
@@ -264,7 +264,7 @@
 <span class="sourceLineNo">256</span>      TableStateManager tsm = env.getMasterServices().getTableStateManager();<a name="line.256"></a>
 <span class="sourceLineNo">257</span>      TableState ts = tsm.getTableState(tableName);<a name="line.257"></a>
 <span class="sourceLineNo">258</span>      if (!ts.isEnabled()) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        LOG.info("Not ENABLED tableState=" + ts + "; skipping disable");<a name="line.259"></a>
+<span class="sourceLineNo">259</span>        LOG.info("Not ENABLED skipping {}", this);<a name="line.259"></a>
 <span class="sourceLineNo">260</span>        setFailure("master-disable-table", new TableNotEnabledException(ts.toString()));<a name="line.260"></a>
 <span class="sourceLineNo">261</span>        canTableBeDisabled = false;<a name="line.261"></a>
 <span class="sourceLineNo">262</span>      }<a name="line.262"></a>
@@ -300,71 +300,72 @@
 <span class="sourceLineNo">292</span>    env.getMasterServices().getTableStateManager().setTableState(<a name="line.292"></a>
 <span class="sourceLineNo">293</span>      tableName,<a name="line.293"></a>
 <span class="sourceLineNo">294</span>      TableState.State.DISABLING);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Mark table state to Disabled<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param env MasterProcedureEnv<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  protected static void setTableStateToDisabled(<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      final MasterProcedureEnv env,<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      final TableName tableName) throws IOException {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    // Flip the table to disabled<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    env.getMasterServices().getTableStateManager().setTableState(<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      tableName,<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      TableState.State.DISABLED);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    LOG.info("Disabled table, " + tableName + ", is completed.");<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
-<span class="sourceLineNo">311</span><a name="line.311"></a>
-<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
-<span class="sourceLineNo">313</span>   * Action after disabling table.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * @param env MasterProcedureEnv<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * @param state the procedure state<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * @throws IOException<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   * @throws InterruptedException<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  protected void postDisable(final MasterProcedureEnv env, final DisableTableState state)<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      throws IOException, InterruptedException {<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    runCoprocessorAction(env, state);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * The procedure could be restarted from a different machine. If the variable is null, we need to<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * retrieve it.<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * @return traceEnabled<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private Boolean isTraceEnabled() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    if (traceEnabled == null) {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      traceEnabled = LOG.isTraceEnabled();<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    return traceEnabled;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Coprocessor Action.<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param env MasterProcedureEnv<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param state the procedure state<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @throws IOException<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @throws InterruptedException<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  private void runCoprocessorAction(final MasterProcedureEnv env, final DisableTableState state)<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      throws IOException, InterruptedException {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    if (cpHost != null) {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      switch (state) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        case DISABLE_TABLE_PRE_OPERATION:<a name="line.348"></a>
-<span class="sourceLineNo">349</span>          cpHost.preDisableTableAction(tableName, getUser());<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          break;<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        case DISABLE_TABLE_POST_OPERATION:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>          cpHost.postCompletedDisableTableAction(tableName, getUser());<a name="line.352"></a>
-<span class="sourceLineNo">353</span>          break;<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        default:<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          throw new UnsupportedOperationException(this + " unhandled state=" + state);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
-<span class="sourceLineNo">359</span>}<a name="line.359"></a>
+<span class="sourceLineNo">295</span>    LOG.info("Set {} to state={}", tableName, TableState.State.DISABLING);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
+<span class="sourceLineNo">297</span><a name="line.297"></a>
+<span class="sourceLineNo">298</span>  /**<a name="line.298"></a>
+<span class="sourceLineNo">299</span>   * Mark table state to Disabled<a name="line.299"></a>
+<span class="sourceLineNo">300</span>   * @param env MasterProcedureEnv<a name="line.300"></a>
+<span class="sourceLineNo">301</span>   * @throws IOException<a name="line.301"></a>
+<span class="sourceLineNo">302</span>   */<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected static void setTableStateToDisabled(<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      final MasterProcedureEnv env,<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      final TableName tableName) throws IOException {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    // Flip the table to disabled<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    env.getMasterServices().getTableStateManager().setTableState(<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      tableName,<a name="line.308"></a>
+<span class="sourceLineNo">309</span>      TableState.State.DISABLED);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    LOG.info("Set {} to state={}", tableName, TableState.State.DISABLED);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Action after disabling table.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * @param env MasterProcedureEnv<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * @param state the procedure state<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * @throws IOException<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * @throws InterruptedException<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  protected void postDisable(final MasterProcedureEnv env, final DisableTableState state)<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      throws IOException, InterruptedException {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    runCoprocessorAction(env, state);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  }<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  /**<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * The procedure could be restarted from a different machine. If the variable is null, we need to<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * retrieve it.<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * @return traceEnabled<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   */<a name="line.329"></a>
+<span class="sourceLineNo">330</span>  private Boolean isTraceEnabled() {<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    if (traceEnabled == null) {<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      traceEnabled = LOG.isTraceEnabled();<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    }<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    return traceEnabled;<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>  /**<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * Coprocessor Action.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param env MasterProcedureEnv<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param state the procedure state<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @throws IOException<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @throws InterruptedException<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   */<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private void runCoprocessorAction(final MasterProcedureEnv env, final DisableTableState state)<a name="line.344"></a>
+<span class="sourceLineNo">345</span>      throws IOException, InterruptedException {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    if (cpHost != null) {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      switch (state) {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        case DISABLE_TABLE_PRE_OPERATION:<a name="line.349"></a>
+<span class="sourceLineNo">350</span>          cpHost.preDisableTableAction(tableName, getUser());<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          break;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>        case DISABLE_TABLE_POST_OPERATION:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          cpHost.postCompletedDisableTableAction(tableName, getUser());<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          break;<a name="line.354"></a>
+<span class="sourceLineNo">355</span>        default:<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          throw new UnsupportedOperationException(this + " unhandled state=" + state);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>      }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>}<a name="line.360"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.ProcedureExecutorListener.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.ProcedureExecutorListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.ProcedureExecutorListener.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.ProcedureExecutorListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.ProcedureExecutorListener.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.Testing.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.Testing.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.Testing.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.Testing.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.Testing.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
index 22f68b3..9c54904 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
@@ -1508,7 +1508,7 @@
 <span class="sourceLineNo">1500</span>  private void execProcedure(RootProcedureState&lt;TEnvironment&gt; procStack,<a name="line.1500"></a>
 <span class="sourceLineNo">1501</span>      Procedure&lt;TEnvironment&gt; procedure) {<a name="line.1501"></a>
 <span class="sourceLineNo">1502</span>    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE,<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      procedure.toString());<a name="line.1503"></a>
+<span class="sourceLineNo">1503</span>        "NOT RUNNABLE! " + procedure.toString());<a name="line.1503"></a>
 <span class="sourceLineNo">1504</span><a name="line.1504"></a>
 <span class="sourceLineNo">1505</span>    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.<a name="line.1505"></a>
 <span class="sourceLineNo">1506</span>    // The exception is caught below and then we hurry to the exit without disturbing state. The<a name="line.1506"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
index 7edd97a..db8431b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -1544,7 +1544,7 @@
 <span class="sourceLineNo">1536</span>    // the close flag?<a name="line.1536"></a>
 <span class="sourceLineNo">1537</span>    if (!abort &amp;&amp; worthPreFlushing() &amp;&amp; canFlush) {<a name="line.1537"></a>
 <span class="sourceLineNo">1538</span>      status.setStatus("Pre-flushing region before close");<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}" + this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
+<span class="sourceLineNo">1539</span>      LOG.info("Running close preflush of {}", this.getRegionInfo().getEncodedName());<a name="line.1539"></a>
 <span class="sourceLineNo">1540</span>      try {<a name="line.1540"></a>
 <span class="sourceLineNo">1541</span>        internalFlushcache(status);<a name="line.1541"></a>
 <span class="sourceLineNo">1542</span>      } catch (IOException ioe) {<a name="line.1542"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.DefaultSyncReplicationPeerInfoProvider.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.DefaultSyncReplicationPeerInfoProvider.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.DefaultSyncReplicationPeerInfoProvider.html
index c3c5b08..1f34b3b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.DefaultSyncReplicationPeerInfoProvider.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.DefaultSyncReplicationPeerInfoProvider.html
@@ -351,7 +351,13 @@
 <span class="sourceLineNo">343</span>      return Optional.empty();<a name="line.343"></a>
 <span class="sourceLineNo">344</span>    }<a name="line.344"></a>
 <span class="sourceLineNo">345</span>  }<a name="line.345"></a>
-<span class="sourceLineNo">346</span>}<a name="line.346"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  WALProvider getWrappedProvider() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return provider;<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>}<a name="line.352"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
index c3c5b08..1f34b3b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
@@ -351,7 +351,13 @@
 <span class="sourceLineNo">343</span>      return Optional.empty();<a name="line.343"></a>
 <span class="sourceLineNo">344</span>    }<a name="line.344"></a>
 <span class="sourceLineNo">345</span>  }<a name="line.345"></a>
-<span class="sourceLineNo">346</span>}<a name="line.346"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  WALProvider getWrappedProvider() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return provider;<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>}<a name="line.352"></a>
 
 
 


[38/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 6a2998c..7600159 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -349,9 +349,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 4335db6..ff8a2f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -293,9 +293,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 </ul>
 </li>


[45/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index 6fcccaf..f3b483f 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.53">IncrementalBackupManager</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.54">IncrementalBackupManager</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupManager.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupManager</a></pre>
 <div class="block">After a full backup was created, the incremental backup will only store the changes made after
  the last full or incremental backup. Creating the backup copies the logfiles in .logs and
@@ -213,8 +213,8 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupMan
 </tr>
 <tr id="i1" class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#excludeAlreadyBackedUpWALs-java.util.List-java.util.List-">excludeAlreadyBackedUpWALs</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;logList,
-                          <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;logFromSystemTable)</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#excludeAlreadyBackedUpAndProcV2WALs-java.util.List-java.util.List-">excludeAlreadyBackedUpAndProcV2WALs</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;logList,
+                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;logFromSystemTable)</code>&nbsp;</td>
 </tr>
 <tr id="i2" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
@@ -286,7 +286,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupMan
 <ul class="blockListLast">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>public static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.54">LOG</a></pre>
+<pre>public static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.55">LOG</a></pre>
 </li>
 </ul>
 </li>
@@ -303,7 +303,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupMan
 <ul class="blockListLast">
 <li class="blockList">
 <h4>IncrementalBackupManager</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.56">IncrementalBackupManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.57">IncrementalBackupManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                                 org.apache.hadoop.conf.Configuration&nbsp;conf)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -326,7 +326,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupMan
 <ul class="blockList">
 <li class="blockList">
 <h4>getIncrBackupLogFileMap</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.66">getIncrBackupLogFileMap</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.67">getIncrBackupLogFileMap</a>()
                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Obtain the list of logs that need to be copied out for this incremental backup. The list is set
  in BackupInfo.</div>
@@ -344,7 +344,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupMan
 <ul class="blockList">
 <li class="blockList">
 <h4>getIncrBackupLogFileList</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.116">getIncrBackupLogFileList</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.117">getIncrBackupLogFileList</a>()
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get list of WAL files eligible for incremental backup.</div>
 <dl>
@@ -355,14 +355,14 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupMan
 </dl>
 </li>
 </ul>
-<a name="excludeAlreadyBackedUpWALs-java.util.List-java.util.List-">
+<a name="excludeAlreadyBackedUpAndProcV2WALs-java.util.List-java.util.List-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>excludeAlreadyBackedUpWALs</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.154">excludeAlreadyBackedUpWALs</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;logList,
-                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;logFromSystemTable)</pre>
+<h4>excludeAlreadyBackedUpAndProcV2WALs</h4>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#line.154">excludeAlreadyBackedUpAndProcV2WALs</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;logList,
+                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;logFromSystemTable)</pre>
 </li>
 </ul>
 <a name="convertToSet-java.util.List-">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/backup/impl/class-use/BackupSystemTable.WALItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/class-use/BackupSystemTable.WALItem.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/class-use/BackupSystemTable.WALItem.html
index 2600850..64ae4b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/class-use/BackupSystemTable.WALItem.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/class-use/BackupSystemTable.WALItem.html
@@ -140,8 +140,8 @@
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
-<td class="colLast"><span class="typeNameLabel">IncrementalBackupManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#excludeAlreadyBackedUpWALs-java.util.List-java.util.List-">excludeAlreadyBackedUpWALs</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;logList,
-                          <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;logFromSystemTable)</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">IncrementalBackupManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#excludeAlreadyBackedUpAndProcV2WALs-java.util.List-java.util.List-">excludeAlreadyBackedUpAndProcV2WALs</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;logList,
+                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable.WALItem</a>&gt;&nbsp;logFromSystemTable)</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html b/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
index 5281f72..8affc45 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
@@ -367,7 +367,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>setConf</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.123">setConf</a>(org.apache.hadoop.conf.Configuration&nbsp;config)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.122">setConf</a>(org.apache.hadoop.conf.Configuration&nbsp;config)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>setConf</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.conf.Configurable</code></dd>
@@ -382,7 +382,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.133">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.132">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-">Stoppable</a></code></span></div>
 <div class="block">Stop this service.
  Implementers should favor logging errors over throwing RuntimeExceptions.</div>
@@ -398,7 +398,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isStopped</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.141">isStopped</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.140">isStopped</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>True if <a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-"><code>Stoppable.stop(String)</code></a> has been closed.</dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 2cd8945..a6b88dd 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 3f66c59..a94689f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -552,24 +552,24 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
index 329f62f..e67b778 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
@@ -201,8 +201,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 308d987..c2e953c 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -183,14 +183,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
index 096b031..9263a63 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
@@ -99,6 +99,10 @@ var activeTableTab = "activeTableTab";
 <div class="description">
 <ul class="blockList">
 <li class="blockList">
+<dl>
+<dt>All Known Implementing Classes:</dt>
+<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></dd>
+</dl>
 <hr>
 <br>
 <pre>@InterfaceAudience.Private

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
index 3671f6a..890d66f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9};
+var methods = {"i0":9,"i1":9,"i2":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,10 +110,13 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.32">CacheableDeserializerIdManager</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.35">CacheableDeserializerIdManager</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
-<div class="block">This class is used to manage the identifiers for
- <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializer</code></a></div>
+<div class="block">This class is used to manage the identifiers for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializer</code></a>.
+ All deserializers are registered with this Manager via the
+ <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registerDeserializer-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-"><code>registerDeserializer(CacheableDeserializer)</code></a>}. On registration, we return an
+ int *identifier* for this deserializer. The int identifier is passed to
+ <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#getDeserializer-int-"><code>getDeserializer(int)</code></a>} to obtain the registered deserializer instance.</div>
 </li>
 </ul>
 </div>
@@ -175,14 +178,22 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <tr id="i0" class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#getDeserializer-int-">getDeserializer</a></span>(int&nbsp;id)</code>
-<div class="block">Get the cacheable deserializer as the given identifier Id</div>
+<div class="block">Get the cacheable deserializer registered at the given identifier Id.</div>
 </td>
 </tr>
 <tr id="i1" class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registerDeserializer-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">registerDeserializer</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;cd)</code>
-<div class="block">Register the given cacheable deserializer and generate an unique identifier
- id for it</div>
+<div class="block">Register the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>Cacheable</code></a> -- usually an hfileblock instance, these implement
+ the Cacheable Interface -- deserializer and generate an unique identifier id for it and return
+ this as our result.</div>
+</td>
+</tr>
+<tr id="i2" class="altColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#save--">save</a></span>()</code>
+<div class="block">Snapshot a map of the current identifiers to class names for reconstruction on reading out
+ of a file.</div>
 </td>
 </tr>
 </table>
@@ -213,7 +224,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>registeredDeserializers</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.33">registeredDeserializers</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.36">registeredDeserializers</a></pre>
 </li>
 </ul>
 <a name="identifier">
@@ -222,7 +233,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>identifier</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.34">identifier</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.37">identifier</a></pre>
 </li>
 </ul>
 </li>
@@ -239,7 +250,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CacheableDeserializerIdManager</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.32">CacheableDeserializerIdManager</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.35">CacheableDeserializerIdManager</a>()</pre>
 </li>
 </ul>
 </li>
@@ -256,33 +267,43 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>registerDeserializer</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.42">registerDeserializer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;cd)</pre>
-<div class="block">Register the given cacheable deserializer and generate an unique identifier
- id for it</div>
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.46">registerDeserializer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;cd)</pre>
+<div class="block">Register the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>Cacheable</code></a> -- usually an hfileblock instance, these implement
+ the Cacheable Interface -- deserializer and generate an unique identifier id for it and return
+ this as our result.</div>
 <dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>cd</code> - </dd>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the identifier of given cacheable deserializer</dd>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#getDeserializer-int-"><code>getDeserializer(int)</code></a></dd>
 </dl>
 </li>
 </ul>
 <a name="getDeserializer-int-">
 <!--   -->
 </a>
-<ul class="blockListLast">
+<ul class="blockList">
 <li class="blockList">
 <h4>getDeserializer</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.55">getDeserializer</a>(int&nbsp;id)</pre>
-<div class="block">Get the cacheable deserializer as the given identifier Id</div>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.58">getDeserializer</a>(int&nbsp;id)</pre>
+<div class="block">Get the cacheable deserializer registered at the given identifier Id.</div>
 <dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>id</code> - </dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>CacheableDeserializer</dd>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registerDeserializer-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-"><code>registerDeserializer(CacheableDeserializer)</code></a></dd>
 </dl>
 </li>
 </ul>
+<a name="save--">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>save</h4>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#line.66">save</a>()</pre>
+<div class="block">Snapshot a map of the current identifiers to class names for reconstruction on reading out
+ of a file.</div>
+</li>
+</ul>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
new file mode 100644
index 0000000..c71315c
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
@@ -0,0 +1,349 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc -->
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>HFileBlock.BlockDeserializer (Apache HBase 3.0.0-SNAPSHOT API)</title>
+<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
+<script type="text/javascript" src="../../../../../../script.js"></script>
+</head>
+<body>
+<script type="text/javascript"><!--
+    try {
+        if (location.href.indexOf('is-external=true') == -1) {
+            parent.document.title="HFileBlock.BlockDeserializer (Apache HBase 3.0.0-SNAPSHOT API)";
+        }
+    }
+    catch(err) {
+    }
+//-->
+var methods = {"i0":10,"i1":10,"i2":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar.top">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.top.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="class-use/HFileBlock.BlockDeserializer.html">Use</a></li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" target="_top">Frames</a></li>
+<li><a href="HFileBlock.BlockDeserializer.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li>Nested&nbsp;|&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.detail">Method</a></li>
+</ul>
+</div>
+<a name="skip.navbar.top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<!-- ======== START OF CLASS DATA ======== -->
+<div class="header">
+<div class="subTitle">org.apache.hadoop.hbase.io.hfile</div>
+<h2 title="Class HFileBlock.BlockDeserializer" class="title">Class HFileBlock.BlockDeserializer</h2>
+</div>
+<div class="contentContainer">
+<ul class="inheritance">
+<li><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
+<li>
+<ul class="inheritance">
+<li>org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockDeserializer</li>
+</ul>
+</li>
+</ul>
+<div class="description">
+<ul class="blockList">
+<li class="blockList">
+<dl>
+<dt>All Implemented Interfaces:</dt>
+<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</dd>
+</dl>
+<dl>
+<dt>Enclosing class:</dt>
+<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dd>
+</dl>
+<hr>
+<br>
+<pre>public static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.258">HFileBlock.BlockDeserializer</a>
+extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
+implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</pre>
+</li>
+</ul>
+</div>
+<div class="summary">
+<ul class="blockList">
+<li class="blockList">
+<!-- ======== CONSTRUCTOR SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor.summary">
+<!--   -->
+</a>
+<h3>Constructor Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
+<caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier</th>
+<th class="colLast" scope="col">Constructor and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>private </code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#BlockDeserializer--">BlockDeserializer</a></span>()</code>&nbsp;</td>
+</tr>
+</table>
+</li>
+</ul>
+<!-- ========== METHOD SUMMARY =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method.summary">
+<!--   -->
+</a>
+<h3>Method Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
+<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tr id="i0" class="altColor">
+<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">deserialize</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b)</code>
+<div class="block">Returns the deserialized object.</div>
+</td>
+</tr>
+<tr id="i1" class="rowColor">
+<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
+           boolean&nbsp;reuse,
+           <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)</code>&nbsp;</td>
+</tr>
+<tr id="i2" class="altColor">
+<td class="colFirst"><code>int</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#getDeserialiserIdentifier--">getDeserialiserIdentifier</a></span>()</code>
+<div class="block">Get the identifier of this deserialiser.</div>
+</td>
+</tr>
+</table>
+<ul class="blockList">
+<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3>
+<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--" title="class or interface in java.lang">getClass</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--" title="class or interface in java.lang">notify</a>, <a href="https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--" title="class or interface in java.lang">wait</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-" title="class or interface in java.lang">wait</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-" title="class or interface in java.lang">wait</a></code></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+<div class="details">
+<ul class="blockList">
+<li class="blockList">
+<!-- ========= CONSTRUCTOR DETAIL ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor.detail">
+<!--   -->
+</a>
+<h3>Constructor Detail</h3>
+<a name="BlockDeserializer--">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>BlockDeserializer</h4>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#line.259">BlockDeserializer</a>()</pre>
+</li>
+</ul>
+</li>
+</ul>
+<!-- ============ METHOD DETAIL ========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method.detail">
+<!--   -->
+</a>
+<h3>Method Detail</h3>
+<a name="deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>deserialize</h4>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#line.263">deserialize</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
+                              boolean&nbsp;reuse,
+                              <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)
+                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></dd>
+<dd><code>reuse</code> - true if Cacheable object can use the given buffer as its
+          content</dd>
+<dd><code>memType</code> - the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><code>Cacheable.MemoryType</code></a> of the buffer</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>T the deserialized object.</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
+</li>
+</ul>
+<a name="getDeserialiserIdentifier--">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getDeserialiserIdentifier</h4>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#line.289">getDeserialiserIdentifier</a>()</pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html#getDeserialiserIdentifier--">CacheableDeserializer</a></code></span></div>
+<div class="block">Get the identifier of this deserialiser. Identifier is unique for each
+ deserializer and generated by <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializerIdManager</code></a></div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html#getDeserialiserIdentifier--">getDeserialiserIdentifier</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>identifier number of this cacheable deserializer</dd>
+</dl>
+</li>
+</ul>
+<a name="deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>deserialize</h4>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#line.294">deserialize</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b)
+                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">CacheableDeserializer</a></code></span></div>
+<div class="block">Returns the deserialized object.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">deserialize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>T the deserialized object.</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+</div>
+<!-- ========= END OF CLASS DATA ========= -->
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar.bottom">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.bottom.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="class-use/HFileBlock.BlockDeserializer.html">Use</a></li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" target="_top">Frames</a></li>
+<li><a href="HFileBlock.BlockDeserializer.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li>Nested&nbsp;|&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.detail">Method</a></li>
+</ul>
+</div>
+<a name="skip.navbar.bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
index b6e49f2..07680ab 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1345">HFileBlock.BlockIterator</a></pre>
+<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1349">HFileBlock.BlockIterator</a></pre>
 <div class="block">Iterator for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock</code></a>s.</div>
 </li>
 </ul>
@@ -159,7 +159,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>nextBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1349">nextBlock</a>()
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1353">nextBlock</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the next block, or null if there are no more blocks to iterate.</div>
 <dl>
@@ -174,7 +174,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>nextBlockWithBlockType</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1355">nextBlockWithBlockType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1359">nextBlockWithBlockType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#nextBlock--"><code>nextBlock()</code></a> but checks block type, throws an
  exception if incorrect, and returns the HFile block</div>
@@ -212,7 +212,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
index 823ec80..a184d94 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1331">HFileBlock.BlockWritable</a></pre>
+<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1335">HFileBlock.BlockWritable</a></pre>
 <div class="block">Something that can be written into a block.</div>
 </li>
 </ul>
@@ -158,7 +158,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockType</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1333">getBlockType</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1337">getBlockType</a>()</pre>
 <div class="block">The type of block this data should use.</div>
 </li>
 </ul>
@@ -168,7 +168,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeToBlock</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1341">writeToBlock</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>&nbsp;out)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1345">writeToBlock</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>&nbsp;out)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the block to the provided stream. Must not write any magic
  records.</div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
index 70e0e81..1a7b9d6 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1359">HFileBlock.FSReader</a></pre>
+<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1363">HFileBlock.FSReader</a></pre>
 <div class="block">An HFile block reader with iteration ability.</div>
 </li>
 </ul>
@@ -199,7 +199,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlockData</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1369">readBlockData</a>(long&nbsp;offset,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1373">readBlockData</a>(long&nbsp;offset,
                          long&nbsp;onDiskSize,
                          boolean&nbsp;pread,
                          boolean&nbsp;updateMetrics)
@@ -224,7 +224,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>blockRange</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1383">blockRange</a>(long&nbsp;startOffset,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1387">blockRange</a>(long&nbsp;startOffset,
                                     long&nbsp;endOffset)</pre>
 <div class="block">Creates a block iterator over the given portion of the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>.
  The iterator returns blocks starting with offset such that offset &lt;=
@@ -246,7 +246,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>closeStreams</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1386">closeStreams</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1390">closeStreams</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Closes the backing streams</div>
 <dl>
@@ -261,7 +261,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockDecodingContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1389">getBlockDecodingContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1393">getBlockDecodingContext</a>()</pre>
 <div class="block">Get a decoder for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a> blocks from this file.</div>
 </li>
 </ul>
@@ -271,7 +271,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>getDefaultBlockDecodingContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1392">getDefaultBlockDecodingContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1396">getDefaultBlockDecodingContext</a>()</pre>
 <div class="block">Get the default decoder for blocks from this file.</div>
 </li>
 </ul>
@@ -281,7 +281,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>setIncludesMemStoreTS</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1394">setIncludesMemStoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1398">setIncludesMemStoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
 </li>
 </ul>
 <a name="setDataBlockEncoder-org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder-">
@@ -290,7 +290,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>setDataBlockEncoder</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1395">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1399">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
 </li>
 </ul>
 <a name="unbufferStream--">
@@ -299,7 +299,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>unbufferStream</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1401">unbufferStream</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1405">unbufferStream</a>()</pre>
 <div class="block">To close the stream's socket. Note: This can be concurrently called from multiple threads and
  implementation should take care of thread safety.</div>
 </li>


[47/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/checkstyle.rss
----------------------------------------------------------------------
diff --git a/checkstyle.rss b/checkstyle.rss
index 217f86d..e2959b5 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
     <language>en-us</language>
     <copyright>&#169;2007 - 2018 The Apache Software Foundation</copyright>
     <item>
-      <title>File: 3697,
-             Errors: 15626,
+      <title>File: 3698,
+             Errors: 15578,
              Warnings: 0,
              Infos: 0
       </title>
@@ -3023,7 +3023,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  8
+                  10
                 </td>
               </tr>
                           <tr>
@@ -5137,7 +5137,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  12
+                  13
                 </td>
               </tr>
                           <tr>
@@ -5305,7 +5305,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  0
+                  1
                 </td>
               </tr>
                           <tr>
@@ -6887,7 +6887,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  23
+                  22
                 </td>
               </tr>
                           <tr>
@@ -7965,7 +7965,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  15
+                  10
                 </td>
               </tr>
                           <tr>
@@ -8175,7 +8175,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  14
+                  13
                 </td>
               </tr>
                           <tr>
@@ -8348,20 +8348,6 @@ under the License.
               </tr>
                           <tr>
                 <td>
-                  <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap.java">org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.java</a>
-                </td>
-                <td>
-                  0
-                </td>
-                <td>
-                  0
-                </td>
-                <td>
-                  1
-                </td>
-              </tr>
-                          <tr>
-                <td>
                   <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.TestRollingRestart.java">org/apache/hadoop/hbase/master/TestRollingRestart.java</a>
                 </td>
                 <td>
@@ -9314,6 +9300,20 @@ under the License.
               </tr>
                           <tr>
                 <td>
+                  <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils.java">org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java</a>
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+              </tr>
+                          <tr>
+                <td>
                   <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity.java">org/apache/hadoop/hbase/coprocessor/CoprocessorServiceBackwardCompatiblity.java</a>
                 </td>
                 <td>
@@ -9729,7 +9729,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  13
+                  14
                 </td>
               </tr>
                           <tr>
@@ -14522,6 +14522,20 @@ under the License.
               </tr>
                           <tr>
                 <td>
+                  <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.chaos.actions.RestartActiveNameNodeAction.java">org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.java</a>
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+              </tr>
+                          <tr>
+                <td>
                   <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.replication.regionserver.TestSourceFSConfigurationProvider.java">org/apache/hadoop/hbase/replication/regionserver/TestSourceFSConfigurationProvider.java</a>
                 </td>
                 <td>
@@ -14923,7 +14937,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  5
+                  4
                 </td>
               </tr>
                           <tr>
@@ -16141,7 +16155,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  44
+                  43
                 </td>
               </tr>
                           <tr>
@@ -17177,7 +17191,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  10
+                  9
                 </td>
               </tr>
                           <tr>
@@ -17471,7 +17485,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  19
+                  1
                 </td>
               </tr>
                           <tr>
@@ -17611,7 +17625,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  14
+                  13
                 </td>
               </tr>
                           <tr>
@@ -19767,7 +19781,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  9
+                  8
                 </td>
               </tr>
                           <tr>
@@ -22525,7 +22539,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  9
+                  10
                 </td>
               </tr>
                           <tr>
@@ -23603,7 +23617,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  22
+                  21
                 </td>
               </tr>
                           <tr>
@@ -26585,7 +26599,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  53
+                  52
                 </td>
               </tr>
                           <tr>
@@ -26669,7 +26683,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  0
+                  4
                 </td>
               </tr>
                           <tr>
@@ -29259,7 +29273,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  80
+                  79
                 </td>
               </tr>
                           <tr>
@@ -31233,7 +31247,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  276
+                  275
                 </td>
               </tr>
                           <tr>
@@ -32353,7 +32367,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  89
+                  87
                 </td>
               </tr>
                           <tr>
@@ -33095,7 +33109,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  1
+                  0
                 </td>
               </tr>
                           <tr>
@@ -33375,7 +33389,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  60
+                  52
                 </td>
               </tr>
                           <tr>
@@ -34285,7 +34299,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  0
+                  2
                 </td>
               </tr>
                           <tr>
@@ -35097,7 +35111,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  7
+                  8
                 </td>
               </tr>
                           <tr>
@@ -35531,7 +35545,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  29
+                  28
                 </td>
               </tr>
                           <tr>
@@ -36189,7 +36203,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  0
+                  2
                 </td>
               </tr>
                           <tr>
@@ -37645,7 +37659,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  20
+                  19
                 </td>
               </tr>
                           <tr>
@@ -39241,7 +39255,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  38
+                  30
                 </td>
               </tr>
                           <tr>
@@ -41355,7 +41369,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  3
+                  4
                 </td>
               </tr>
                           <tr>
@@ -41593,7 +41607,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  18
+                  17
                 </td>
               </tr>
                           <tr>
@@ -48649,7 +48663,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  41
+                  40
                 </td>
               </tr>
                           <tr>
@@ -48803,7 +48817,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  10
+                  9
                 </td>
               </tr>
                           <tr>
@@ -48817,7 +48831,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  75
+                  74
                 </td>
               </tr>
                           <tr>
@@ -49405,7 +49419,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  4
+                  2
                 </td>
               </tr>
                           <tr>
@@ -51435,7 +51449,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  2
+                  1
                 </td>
               </tr>
                           <tr>
@@ -51547,7 +51561,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  8
+                  7
                 </td>
               </tr>
                           <tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index 9e26700..b5a7e91 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -375,7 +375,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index 13892e4..e2a0456 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -440,7 +440,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 3bd85cd..76f14aa 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -905,7 +905,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index ba88ea2..71d45a9 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -313,7 +313,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index 17aefcc..c45d0ca 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -1005,7 +1005,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/allclasses-frame.html
----------------------------------------------------------------------
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index 55b1c70..3edc10c 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -312,6 +312,7 @@
 <li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketCache.SharedMemoryBucketEntry</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketCache.StatisticsThread</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketCacheStats</a></li>
+<li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketProtoUtils</a></li>
 <li><a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.html" title="class in org.apache.hadoop.hbase.ipc" target="classFrame">BufferCallBeforeInitHandler</a></li>
 <li><a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc" target="classFrame">BufferCallBeforeInitHandler.BufferCallAction</a></li>
 <li><a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallEvent.html" title="class in org.apache.hadoop.hbase.ipc" target="classFrame">BufferCallBeforeInitHandler.BufferCallEvent</a></li>
@@ -1102,6 +1103,7 @@
 <li><a href="org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.html" title="class in org.apache.hadoop.hbase.backup.example" target="classFrame">HFileArchiveTableMonitor</a></li>
 <li><a href="org/apache/hadoop/hbase/util/HFileArchiveUtil.html" title="class in org.apache.hadoop.hbase.util" target="classFrame">HFileArchiveUtil</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock</a></li>
+<li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock.BlockDeserializer</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile" target="classFrame"><span class="interfaceName">HFileBlock.BlockIterator</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile" target="classFrame"><span class="interfaceName">HFileBlock.BlockWritable</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile" target="classFrame"><span class="interfaceName">HFileBlock.FSReader</span></a></li>
@@ -2846,7 +2848,6 @@
 <li><a href="org/apache/hadoop/hbase/types/Union2.html" title="class in org.apache.hadoop.hbase.types" target="classFrame">Union2</a></li>
 <li><a href="org/apache/hadoop/hbase/types/Union3.html" title="class in org.apache.hadoop.hbase.types" target="classFrame">Union3</a></li>
 <li><a href="org/apache/hadoop/hbase/types/Union4.html" title="class in org.apache.hadoop.hbase.types" target="classFrame">Union4</a></li>
-<li><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">UniqueIndexMap</a></li>
 <li><a href="org/apache/hadoop/hbase/exceptions/UnknownProtocolException.html" title="class in org.apache.hadoop.hbase.exceptions" target="classFrame">UnknownProtocolException</a></li>
 <li><a href="org/apache/hadoop/hbase/UnknownRegionException.html" title="class in org.apache.hadoop.hbase" target="classFrame">UnknownRegionException</a></li>
 <li><a href="org/apache/hadoop/hbase/UnknownScannerException.html" title="class in org.apache.hadoop.hbase" target="classFrame">UnknownScannerException</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/allclasses-noframe.html
----------------------------------------------------------------------
diff --git a/devapidocs/allclasses-noframe.html b/devapidocs/allclasses-noframe.html
index bdcf523..8f9f88f 100644
--- a/devapidocs/allclasses-noframe.html
+++ b/devapidocs/allclasses-noframe.html
@@ -312,6 +312,7 @@
 <li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.SharedMemoryBucketEntry</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.StatisticsThread</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCacheStats</a></li>
+<li><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></li>
 <li><a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.html" title="class in org.apache.hadoop.hbase.ipc">BufferCallBeforeInitHandler</a></li>
 <li><a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc">BufferCallBeforeInitHandler.BufferCallAction</a></li>
 <li><a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallEvent.html" title="class in org.apache.hadoop.hbase.ipc">BufferCallBeforeInitHandler.BufferCallEvent</a></li>
@@ -1102,6 +1103,7 @@
 <li><a href="org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.html" title="class in org.apache.hadoop.hbase.backup.example">HFileArchiveTableMonitor</a></li>
 <li><a href="org/apache/hadoop/hbase/util/HFileArchiveUtil.html" title="class in org.apache.hadoop.hbase.util">HFileArchiveUtil</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></li>
+<li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="interfaceName">HFileBlock.BlockIterator</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="interfaceName">HFileBlock.BlockWritable</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="interfaceName">HFileBlock.FSReader</span></a></li>
@@ -2846,7 +2848,6 @@
 <li><a href="org/apache/hadoop/hbase/types/Union2.html" title="class in org.apache.hadoop.hbase.types">Union2</a></li>
 <li><a href="org/apache/hadoop/hbase/types/Union3.html" title="class in org.apache.hadoop.hbase.types">Union3</a></li>
 <li><a href="org/apache/hadoop/hbase/types/Union4.html" title="class in org.apache.hadoop.hbase.types">Union4</a></li>
-<li><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></li>
 <li><a href="org/apache/hadoop/hbase/exceptions/UnknownProtocolException.html" title="class in org.apache.hadoop.hbase.exceptions">UnknownProtocolException</a></li>
 <li><a href="org/apache/hadoop/hbase/UnknownRegionException.html" title="class in org.apache.hadoop.hbase">UnknownRegionException</a></li>
 <li><a href="org/apache/hadoop/hbase/UnknownScannerException.html" title="class in org.apache.hadoop.hbase">UnknownScannerException</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 6c0f466..a486014 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3803,21 +3803,21 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Wed Aug  1 14:39:39 UTC 2018"</code></td>
+<td class="colLast"><code>"Thu Aug  2 19:41:57 UTC 2018"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"323907f84fcb5ca2cb33131e212ccda4ace76c68"</code></td>
+<td class="colLast"><code>"613d831429960348dc42c3bdb6ea5d31be15c81c"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"5b583e7044e19505b52ef0c80df82305"</code></td>
+<td class="colLast"><code>"1bde06453e3a7ac1ff5e22617f911b02"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">
@@ -10387,25 +10387,6 @@
 </li>
 <li class="blockList">
 <table class="constantsSummary" border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
-<caption><span>org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&gt;</span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th scope="col">Constant Field</th>
-<th class="colLast" scope="col">Value</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap.serialVersionUID">
-<!--   -->
-</a><code>private&nbsp;static&nbsp;final&nbsp;long</code></td>
-<td><code><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#serialVersionUID">serialVersionUID</a></code></td>
-<td class="colLast"><code>-1145635738654002342L</code></td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<table class="constantsSummary" border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
 <caption><span>org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UnsafeSharedMemoryBucketEntry</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
 <th class="colFirst" scope="col">Modifier and Type</th>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 8f51afb..c0aecfb 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -6293,6 +6293,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html#blockDataSizes">blockDataSizes</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#BlockDeserializer--">BlockDeserializer()</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManagerSource.html#BLOCKED_FLUSH_DESC">BLOCKED_FLUSH_DESC</a></span> - Static variable in interface org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManagerSource.html" title="interface in org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManagerSource</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManagerSource.html#BLOCKED_FLUSH_GAUGE_DESC">BLOCKED_FLUSH_GAUGE_DESC</a></span> - Static variable in interface org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManagerSource.html" title="interface in org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManagerSource</a></dt>
@@ -6832,6 +6834,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html#bucketList">bucketList</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.BucketSizeInfo</a></dt>
 <dd>&nbsp;</dd>
+<dt><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketProtoUtils</span></a> - Class in <a href="org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#BucketProtoUtils--">BucketProtoUtils()</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#buckets">buckets</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#bucketSize">bucketSize</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntryGroup</a></dt>
@@ -7971,8 +7977,7 @@
 </dd>
 <dt><a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheableDeserializerIdManager</span></a> - Class in <a href="org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></dt>
 <dd>
-<div class="block">This class is used to manage the identifiers for
- <a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializer</code></a></div>
+<div class="block">This class is used to manage the identifiers for <a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializer</code></a>.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#CacheableDeserializerIdManager--">CacheableDeserializerIdManager()</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheableDeserializerIdManager</a></dt>
 <dd>&nbsp;</dd>
@@ -23398,8 +23403,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HConstants.html#DEFAULT_META_REPLICA_NUM">DEFAULT_META_REPLICA_NUM</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HConstants.html" title="class in org.apache.hadoop.hbase">HConstants</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALFactory.html#DEFAULT_META_WAL_PROVIDER">DEFAULT_META_WAL_PROVIDER</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.html#DEFAULT_MIN_AGE_MS">DEFAULT_MIN_AGE_MS</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">BusyRegionSplitPolicy</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/ipc/RpcServer.html#DEFAULT_MIN_CLIENT_REQUEST_TIMEOUT">DEFAULT_MIN_CLIENT_REQUEST_TIMEOUT</a></span> - Static variable in class org.apache.hadoop.hbase.ipc.<a href="org/apache/hadoop/hbase/ipc/RpcServer.html" title="class in org.apache.hadoop.hbase.ipc">RpcServer</a></dt>
@@ -24566,6 +24569,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RegionMover.html#deleteFile-java.lang.String-">deleteFile(String)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RegionMover.html" title="class in org.apache.hadoop.hbase.util">RegionMover</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#deleteFileOnClose-java.io.File-">deleteFileOnClose(File)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></dt>
+<dd>
+<div class="block">Create an input stream that deletes the file after reading it.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/cleaner/CleanerChore.html#deleteFiles-java.lang.Iterable-">deleteFiles(Iterable&lt;FileStatus&gt;)</a></span> - Method in class org.apache.hadoop.hbase.master.cleaner.<a href="org/apache/hadoop/hbase/master/cleaner/CleanerChore.html" title="class in org.apache.hadoop.hbase.master.cleaner">CleanerChore</a></dt>
 <dd>
 <div class="block">Delete the given files</div>
@@ -25342,9 +25349,9 @@
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html#DESCRIPTOR_VERSION">DESCRIPTOR_VERSION</a></span> - Static variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html" title="class in org.apache.hadoop.hbase.snapshot">SnapshotManifestV2</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserialiserIndex">deserialiserIndex</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#deserialiserMap">deserialiserMap</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">The index of the deserializer that can deserialize this BucketEntry content.</div>
+</dd>
 <dt><a href="org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions"><span class="typeNameLink">DeserializationException</span></a> - Exception in <a href="org/apache/hadoop/hbase/exceptions/package-summary.html">org.apache.hadoop.hbase.exceptions</a></dt>
 <dd>
 <div class="block">Failed deserialization.</div>
@@ -25371,6 +25378,10 @@
 <dd>
 <div class="block">Deserialize the fixed file trailer from the given stream.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize(ByteBuff, boolean, Cacheable.MemoryType)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">deserialize(ByteBuff)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/CellSerialization.CellDeserializer.html#deserialize-org.apache.hadoop.hbase.Cell-">deserialize(Cell)</a></span> - Method in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/CellSerialization.CellDeserializer.html" title="class in org.apache.hadoop.hbase.mapreduce">CellSerialization.CellDeserializer</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/MutationSerialization.MutationDeserializer.html#deserialize-org.apache.hadoop.hbase.client.Mutation-">deserialize(Mutation)</a></span> - Method in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/MutationSerialization.MutationDeserializer.html" title="class in org.apache.hadoop.hbase.mapreduce">MutationSerialization.MutationDeserializer</a></dt>
@@ -25401,7 +25412,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html#DESERIALIZER_IDENTIFIER">DESERIALIZER_IDENTIFIER</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference(UniqueIndexMap&lt;Integer&gt;)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference--">deserializerReference()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/assignment/AssignProcedure.html#deserializeStateData-org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer-">deserializeStateData(ProcedureStateSerializer)</a></span> - Method in class org.apache.hadoop.hbase.master.assignment.<a href="org/apache/hadoop/hbase/master/assignment/AssignProcedure.html" title="class in org.apache.hadoop.hbase.master.assignment">AssignProcedure</a></dt>
 <dd>&nbsp;</dd>
@@ -29075,7 +29086,7 @@
 <dd>
 <div class="block">Minor compaction flag in FileInfo</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#excludeAlreadyBackedUpWALs-java.util.List-java.util.List-">excludeAlreadyBackedUpWALs(List&lt;String&gt;, List&lt;BackupSystemTable.WALItem&gt;)</a></span> - Method in class org.apache.hadoop.hbase.backup.impl.<a href="org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html" title="class in org.apache.hadoop.hbase.backup.impl">IncrementalBackupManager</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html#excludeAlreadyBackedUpAndProcV2WALs-java.util.List-java.util.List-">excludeAlreadyBackedUpAndProcV2WALs(List&lt;String&gt;, List&lt;BackupSystemTable.WALItem&gt;)</a></span> - Method in class org.apache.hadoop.hbase.backup.impl.<a href="org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html" title="class in org.apache.hadoop.hbase.backup.impl">IncrementalBackupManager</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RegionMover.html#excludeFile">excludeFile</a></span> - Variable in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RegionMover.html" title="class in org.apache.hadoop.hbase.util">RegionMover</a></dt>
 <dd>&nbsp;</dd>
@@ -33081,6 +33092,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/net/Address.html#fromParts-java.lang.String-int-">fromParts(String, int)</a></span> - Static method in class org.apache.hadoop.hbase.net.<a href="org/apache/hadoop/hbase/net/Address.html" title="class in org.apache.hadoop.hbase.net">Address</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPB-java.util.Map-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap-">fromPB(Map&lt;Integer, String&gt;, BucketCacheProtos.BackingMap)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPb-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType-">fromPb(BucketCacheProtos.BlockType)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/Chunk.html#fromPool">fromPool</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/Chunk.html" title="class in org.apache.hadoop.hbase.regionserver">Chunk</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/BackupInfo.html#fromProto-org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo-">fromProto(BackupProtos.BackupInfo)</a></span> - Static method in class org.apache.hadoop.hbase.backup.<a href="org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a></dt>
@@ -34311,6 +34326,8 @@
 <dd>
 <div class="block">Gets the count of accesses to the mob file cache.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getAccessCounter--">getAccessCounter()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/Mutation.html#getACL--">getACL()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/Query.html#getACL--">getACL()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/Query.html" title="class in org.apache.hadoop.hbase.client">Query</a></dt>
@@ -38293,13 +38310,15 @@
 <dd>
 <div class="block">Get the identifier of this deserialiser.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#getDeserialiserIdentifier--">getDeserialiserIdentifier()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/Cacheable.html#getDeserializer--">getDeserializer()</a></span> - Method in interface org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a></dt>
 <dd>
 <div class="block">Returns CacheableDeserializer instance which reconstructs original object from ByteBuffer.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#getDeserializer-int-">getDeserializer(int)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheableDeserializerIdManager</a></dt>
 <dd>
-<div class="block">Get the cacheable deserializer as the given identifier Id</div>
+<div class="block">Get the cacheable deserializer registered at the given identifier Id.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getDeserializer--">getDeserializer()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>&nbsp;</dd>
@@ -54016,6 +54035,8 @@
 <dd>
 <div class="block">Get the directory to build a snapshot, before it is finalized</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#getWrappedProvider--">getWrappedProvider()</a></span> - Method in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html" title="class in org.apache.hadoop.hbase.wal">SyncReplicationWALProvider</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/MetricsIO.html#getWrapper--">getWrapper()</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/MetricsIO.html" title="class in org.apache.hadoop.hbase.io">MetricsIO</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/Writables.html#getWritable-byte:A-org.apache.hadoop.io.Writable-">getWritable(byte[], Writable)</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/Writables.html" title="class in org.apache.hadoop.hbase.util">Writables</a></dt>
@@ -57293,6 +57314,8 @@
 <dd>
 <div class="block">Creates a block from an existing buffer starting with a header.</div>
 </dd>
+<dt><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.BlockDeserializer</span></a> - Class in <a href="org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></dt>
+<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.BlockIterator</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></dt>
 <dd>
 <div class="block">Iterator for <a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock</code></a>s.</div>
@@ -70767,8 +70790,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.HFileCellMapper.html#map-org.apache.hadoop.io.NullWritable-org.apache.hadoop.hbase.Cell-org.apache.hadoop.mapreduce.Mapper.Context-">map(NullWritable, Cell, Mapper&lt;NullWritable, Cell, ImmutableBytesWritable, Cell&gt;.Context)</a></span> - Method in class org.apache.hadoop.hbase.backup.mapreduce.<a href="org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.HFileCellMapper.html" title="class in org.apache.hadoop.hbase.backup.mapreduce">MapReduceHFileSplitterJob.HFileCellMapper</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#map-T-">map(T)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#map">map</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/LruBlockCache.html#map">map</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a></dt>
@@ -74450,8 +74471,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetricsZooKeeperSourceImpl.html#MetricsZooKeeperSourceImpl-java.lang.String-java.lang.String-java.lang.String-java.lang.String-">MetricsZooKeeperSourceImpl(String, String, String, String)</a></span> - Constructor for class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetricsZooKeeperSourceImpl.html" title="class in org.apache.hadoop.hbase.zookeeper">MetricsZooKeeperSourceImpl</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#mForwardMap">mForwardMap</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html#MID_KEY_METADATA_SIZE">MID_KEY_METADATA_SIZE</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex</a></dt>
 <dd>
 <div class="block">The size of a meta-data record used for finding the mid-key in a
@@ -74644,8 +74663,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.html#minDelay">minDelay</a></span> - Variable in class org.apache.hadoop.hbase.ipc.<a href="org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.html" title="class in org.apache.hadoop.hbase.ipc">AdaptiveLifoCoDelCallQueue</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#mIndex">mIndex</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/AssignmentVerificationReport.html#minDispersionNum">minDispersionNum</a></span> - Variable in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/AssignmentVerificationReport.html" title="class in org.apache.hadoop.hbase.master">AssignmentVerificationReport</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/AssignmentVerificationReport.html#minDispersionNumServerSet">minDispersionNumServerSet</a></span> - Variable in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/AssignmentVerificationReport.html" title="class in org.apache.hadoop.hbase.master">AssignmentVerificationReport</a></dt>
@@ -75627,8 +75644,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#MR_NUM_MAPS">MR_NUM_MAPS</a></span> - Static variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#mReverseMap">mReverseMap</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.html#MRU_LOAD_FACTOR">MRU_LOAD_FACTOR</a></span> - Static variable in class org.apache.hadoop.hbase.master.balancer.<a href="org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.html" title="class in org.apache.hadoop.hbase.master.balancer">MetricsStochasticBalancerSourceImpl</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.html#mruCap">mruCap</a></span> - Variable in class org.apache.hadoop.hbase.master.balancer.<a href="org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.html" title="class in org.apache.hadoop.hbase.master.balancer">MetricsStochasticBalancerSourceImpl</a></dt>
@@ -80926,6 +80941,8 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MetaTableAccessor.html#parseParentsBytes-byte:A-">parseParentsBytes(byte[])</a></span> - Static method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MetaTableAccessor.html" title="class in org.apache.hadoop.hbase">MetaTableAccessor</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#parsePB-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry-">parsePB(BucketCacheProtos.BucketCacheEntry)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#parsePB-org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto-">parsePB(HFileProtos.FileInfoProto)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a></dt>
 <dd>
 <div class="block">Fill our map with content of the pb we read off disk</div>
@@ -90974,8 +90991,9 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registerDeserializer-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">registerDeserializer(CacheableDeserializer&lt;Cacheable&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheableDeserializerIdManager</a></dt>
 <dd>
-<div class="block">Register the given cacheable deserializer and generate an unique identifier
- id for it</div>
+<div class="block">Register the given <a href="org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>Cacheable</code></a> -- usually an hfileblock instance, these implement
+ the Cacheable Interface -- deserializer and generate an unique identifier id for it and return
+ this as our result.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registeredDeserializers">registeredDeserializers</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheableDeserializerIdManager</a></dt>
 <dd>&nbsp;</dd>
@@ -97634,6 +97652,11 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/filter/FuzzyRowFilter.html#satisfiesNoUnsafe-boolean-byte:A-int-int-byte:A-byte:A-">satisfiesNoUnsafe(boolean, byte[], int, int, byte[], byte[])</a></span> - Static method in class org.apache.hadoop.hbase.filter.<a href="org/apache/hadoop/hbase/filter/FuzzyRowFilter.html" title="class in org.apache.hadoop.hbase.filter">FuzzyRowFilter</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#save--">save()</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheableDeserializerIdManager</a></dt>
+<dd>
+<div class="block">Snapshot a map of the current identifiers to class names for reconstruction on reading out
+ of a file.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html#saveMetadata-org.apache.hadoop.hbase.io.hfile.HFile.Writer-">saveMetadata(HFile.Writer)</a></span> - Method in interface org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a></dt>
 <dd>
 <div class="block">Save metadata in HFile which will be written to disk</div>
@@ -99379,8 +99402,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/CacheFullException.html#serialVersionUID">serialVersionUID</a></span> - Static variable in exception org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/CacheFullException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CacheFullException</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#serialVersionUID">serialVersionUID</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html#serialVersionUID">serialVersionUID</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UnsafeSharedMemoryBucketEntry</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CorruptHFileException.html#serialVersionUID">serialVersionUID</a></span> - Static variable in exception org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CorruptHFileException.html" title="class in org.apache.hadoop.hbase.io.hfile">CorruptHFileException</a></dt>
@@ -101309,7 +101330,7 @@ service.</div>
 <dd>
 <div class="block">Sets the region/store name, for logging.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference(CacheableDeserializer&lt;Cacheable&gt;, UniqueIndexMap&lt;Integer&gt;)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">setDeserialiserReference(CacheableDeserializer&lt;Cacheable&gt;)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/RegionPlan.html#setDestination-org.apache.hadoop.hbase.ServerName-">setDestination(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/RegionPlan.html" title="class in org.apache.hadoop.hbase.master">RegionPlan</a></dt>
 <dd>
@@ -114442,6 +114463,18 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html#toPathList-java.util.List-">toPathList(List&lt;Pair&lt;TableName, Path&gt;&gt;)</a></span> - Method in class org.apache.hadoop.hbase.backup.mapreduce.<a href="org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html" title="class in org.apache.hadoop.hbase.backup.mapreduce">MapReduceBackupMergeJob</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache-">toPB(BucketCache)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-java.util.Map-">toPB(Map&lt;BlockCacheKey, BucketCache.BucketEntry&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">toPB(BlockCacheKey)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockType-">toPB(BlockType)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry-">toPB(BucketCache.BucketEntry)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockPriority-">toPB(BlockPriority)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/StoreScanner.html#topChanged">topChanged</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/StoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreScanner</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/replication/RefreshPeerProcedure.html#toPeerModificationType-org.apache.hadoop.hbase.master.procedure.PeerProcedureInterface.PeerOperationType-">toPeerModificationType(PeerProcedureInterface.PeerOperationType)</a></span> - Static method in class org.apache.hadoop.hbase.master.replication.<a href="org/apache/hadoop/hbase/master/replication/RefreshPeerProcedure.html" title="class in org.apache.hadoop.hbase.master.replication">RefreshPeerProcedure</a></dt>
@@ -116797,12 +116830,6 @@ service.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HDFSBlocksDistribution.html#uniqueBlocksTotalWeight">uniqueBlocksTotalWeight</a></span> - Variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HDFSBlocksDistribution.html" title="class in org.apache.hadoop.hbase">HDFSBlocksDistribution</a></dt>
 <dd>&nbsp;</dd>
-<dt><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">UniqueIndexMap</span></a>&lt;<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&gt; - Class in <a href="org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a></dt>
-<dd>
-<div class="block">Map from type T to int and vice-versa.</div>
-</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#UniqueIndexMap--">UniqueIndexMap()</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html#uniqueRackList">uniqueRackList</a></span> - Variable in class org.apache.hadoop.hbase.favored.<a href="org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html" title="class in org.apache.hadoop.hbase.favored">FavoredNodeAssignmentHelper</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/Size.html#unit">unit</a></span> - Variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/Size.html" title="class in org.apache.hadoop.hbase">Size</a></dt>
@@ -116910,8 +116937,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HBaseFsck.html#unlockHbck--">unlockHbck()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#unmap-int-">unmap(int)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html#unmodifiableTableCFsMap-java.util.Map-">unmodifiableTableCFsMap(Map&lt;TableName, List&lt;String&gt;&gt;)</a></span> - Method in class org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html" title="class in org.apache.hadoop.hbase.replication">ReplicationPeerConfig</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html#unmodifiedTableDescriptor">unmodifiedTableDescriptor</a></span> - Variable in class org.apache.hadoop.hbase.master.procedure.<a href="org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html" title="class in org.apache.hadoop.hbase.master.procedure">ModifyTableProcedure</a></dt>
@@ -120710,6 +120735,8 @@ the order they are declared.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HStore.html#verifyBulkLoads">verifyBulkLoads</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#verifyCapacityAndClasses-long-java.lang.String-java.lang.String-">verifyCapacityAndClasses(long, String, String)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#verifyChecksum">verifyChecksum</a></span> - Variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot.ExportMapper</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#verifyChecksum">verifyChecksum</a></span> - Variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
@@ -123674,7 +123701,7 @@ the order they are declared.</div>
 <dd>
 <div class="block">Writes the block to the provided stream.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache(IOEngine, BucketAllocator, UniqueIndexMap&lt;Integer&gt;, LongAdder)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache(IOEngine, BucketAllocator, LongAdder)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/ipc/ServerCall.html#writeToCOS-org.apache.hbase.thirdparty.com.google.protobuf.Message-org.apache.hbase.thirdparty.com.google.protobuf.Message-int-java.nio.ByteBuffer-">writeToCOS(Message, Message, int, ByteBuffer)</a></span> - Static method in class org.apache.hadoop.hbase.ipc.<a href="org/apache/hadoop/hbase/ipc/ServerCall.html" title="class in org.apache.hadoop.hbase.ipc">ServerCall</a></dt>
 <dd>&nbsp;</dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
index c7daf0a..742cef5 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.98">BackupSystemTable.WALItem</a>
+<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#line.99">BackupSystemTable.WALItem</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -223,7 +223,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>backupId</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.99">backupId</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.100">backupId</a></pre>
 </li>
 </ul>
 <a name="walFile">
@@ -232,7 +232,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>walFile</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.100">walFile</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.101">walFile</a></pre>
 </li>
 </ul>
 <a name="backupRoot">
@@ -241,7 +241,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>backupRoot</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.101">backupRoot</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.102">backupRoot</a></pre>
 </li>
 </ul>
 </li>
@@ -258,7 +258,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WALItem</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.103">WALItem</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.104">WALItem</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupId,
         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;walFile,
         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;backupRoot)</pre>
 </li>
@@ -277,7 +277,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupId</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.109">getBackupId</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.110">getBackupId</a>()</pre>
 </li>
 </ul>
 <a name="getWalFile--">
@@ -286,7 +286,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getWalFile</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.113">getWalFile</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.114">getWalFile</a>()</pre>
 </li>
 </ul>
 <a name="getBackupRoot--">
@@ -295,7 +295,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBackupRoot</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.117">getBackupRoot</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.118">getBackupRoot</a>()</pre>
 </li>
 </ul>
 <a name="toString--">
@@ -304,7 +304,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.122">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html#line.123">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>


[49/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index fc5ffdb..4c27d13 100644
--- a/book.html
+++ b/book.html
@@ -14672,6 +14672,121 @@ See <a href="#arch.catalog.meta">hbase:meta</a> for more information on the meta
 </div>
 </div>
 </div>
+<div class="sect2">
+<h3 id="master.wal"><a class="anchor" href="#master.wal"></a>69.5. MasterProcWAL</h3>
+<div class="paragraph">
+<p>HMaster records administrative operations and their running states, such as the handling of a crashed server,
+table creation, and other DDLs, into its own WAL file. The WALs are stored under the MasterProcWALs
+directory. The Master WALs are not like RegionServer WALs. Keeping up the Master WAL allows
+us to run a state machine that is resilient across Master failures. For example, if an HMaster in the
+middle of creating a table encounters an issue and fails, the next active HMaster can take up where
+the previous left off and carry the operation to completion. Since hbase-2.0.0, a
+new AssignmentManager (A.K.A AMv2) was introduced and the HMaster handles region assignment
+operations, server crash processing, balancing, etc., all via AMv2 persisting all state and
+transitions into MasterProcWALs rather than up into ZooKeeper, as we do in hbase-1.x.</p>
+</div>
+<div class="paragraph">
+<p>See <a href="#amv2">AMv2 Description for Devs</a> (and <a href="#pv2">Procedure Framework (Pv2): <a href="https://issues.apache.org/jira/browse/HBASE-12439">HBASE-12439</a></a> for its basis) if you would like to learn more about the new
+AssignmentManager.</p>
+</div>
+<div class="sect3">
+<h4 id="master.wal.conf"><a class="anchor" href="#master.wal.conf"></a>69.5.1. Configurations for MasterProcWAL</h4>
+<div class="paragraph">
+<p>Here is the list of configurations that affect MasterProcWAL operation.
+You should not have to change your defaults.</p>
+</div>
+<div id="hbase.procedure.store.wal.periodic.roll.msec" class="dlist">
+<dl>
+<dt class="hdlist1"><strong><code>hbase.procedure.store.wal.periodic.roll.msec</code></strong></dt>
+<dd>
+<div class="paragraph">
+<div class="title">Description</div>
+<p>Frequency of generating a new WAL</p>
+</div>
+<div class="paragraph">
+<div class="title">Default</div>
+<p><code>1h (3600000 in msec)</code></p>
+</div>
+</dd>
+</dl>
+</div>
+<div id="hbase.procedure.store.wal.roll.threshold" class="dlist">
+<dl>
+<dt class="hdlist1"><strong><code>hbase.procedure.store.wal.roll.threshold</code></strong></dt>
+<dd>
+<div class="paragraph">
+<div class="title">Description</div>
+<p>Threshold in size before the WAL rolls. Every time the WAL reaches this size or the above period, 1 hour, passes since last log roll, the HMaster will generate a new WAL.</p>
+</div>
+<div class="paragraph">
+<div class="title">Default</div>
+<p><code>32MB (33554432 in byte)</code></p>
+</div>
+</dd>
+</dl>
+</div>
+<div id="hbase.procedure.store.wal.warn.threshold" class="dlist">
+<dl>
+<dt class="hdlist1"><strong><code>hbase.procedure.store.wal.warn.threshold</code></strong></dt>
+<dd>
+<div class="paragraph">
+<div class="title">Description</div>
+<p>If the number of WALs goes beyond this threshold, the following message should appear in the HMaster log with WARN level when rolling.</p>
+</div>
+<div class="literalblock">
+<div class="content">
+<pre>procedure WALs count=xx above the warning threshold 64. check running procedures to see if something is stuck.</pre>
+</div>
+</div>
+<div class="paragraph">
+<div class="title">Default</div>
+<p><code>64</code></p>
+</div>
+</dd>
+</dl>
+</div>
+<div id="hbase.procedure.store.wal.max.retries.before.roll" class="dlist">
+<dl>
+<dt class="hdlist1"><strong><code>hbase.procedure.store.wal.max.retries.before.roll</code></strong></dt>
+<dd>
+<div class="paragraph">
+<div class="title">Description</div>
+<p>Max number of retries when syncing slots (records) to the underlying storage, such as HDFS. On every attempt, the following message should appear in the HMaster log.</p>
+</div>
+<div class="literalblock">
+<div class="content">
+<pre>unable to sync slots, retry=xx</pre>
+</div>
+</div>
+<div class="paragraph">
+<div class="title">Default</div>
+<p><code>3</code></p>
+</div>
+</dd>
+</dl>
+</div>
+<div id="hbase.procedure.store.wal.sync.failure.roll.max" class="dlist">
+<dl>
+<dt class="hdlist1"><strong><code>hbase.procedure.store.wal.sync.failure.roll.max</code></strong></dt>
+<dd>
+<div class="paragraph">
+<div class="title">Description</div>
+<p>After the above 3 retries, the log is rolled and the retry count is reset to 0, whereupon a new set of retries starts. This configuration controls the max number of attempts of log rolling upon sync failure. That is, the HMaster is allowed to fail to sync 9 times in total. Once this limit is exceeded, the following log message should appear in the HMaster log.</p>
+</div>
+<div class="literalblock">
+<div class="content">
+<pre>Sync slots after log roll failed, abort.</pre>
+</div>
+</div>
+<div class="paragraph">
+<div class="title">Default</div>
+<p><code>3</code></p>
+</div>
+</dd>
+</dl>
+</div>
+</div>
+</div>
 </div>
 </div>
 <div class="sect1">
@@ -15320,7 +15435,8 @@ You will likely find references to the HLog in documentation tailored to these o
 by a short name label (that unfortunately is not always descriptive). You set the provider in
 <em>hbase-site.xml</em> passing the WAL provider short-name as the value on the
 <em>hbase.wal.provider</em> property (Set the provider for <em>hbase:meta</em> using the
-<em>hbase.wal.meta_provider</em> property).</p>
+<em>hbase.wal.meta_provider</em> property, otherwise it uses the same provider configured
+by <em>hbase.wal.provider</em>).</p>
 </div>
 <div class="ulist">
 <ul>
@@ -40976,7 +41092,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2018-08-01 14:29:55 UTC
+Last updated 2018-08-02 19:32:10 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index 9b3e7dd..e292b41 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -306,7 +306,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 


[50/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 6dd953e..4a52551 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20180801142955+00'00')
-/CreationDate (D:20180801144546+00'00')
+/ModDate (D:20180802193210+00'00')
+/CreationDate (D:20180802194755+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 28 0 R
-/Outlines 4972 0 R
-/PageLabels 5223 0 R
+/Outlines 4987 0 R
+/PageLabels 5238 0 R
 /PageMode /UseOutlines
 /OpenAction [7 0 R /FitH 842.89]
 /ViewerPreferences << /DisplayDocTitle true
@@ -23,8 +23,8 @@ endobj
 endobj
 3 0 obj
 << /Type /Pages
-/Count 783
-/Kids [7 0 R 12 0 R 14 0 R 16 0 R 18 0 R 20 0 R 22 0 R 24 0 R 26 0 R 46 0 R 49 0 R 52 0 R 56 0 R 63 0 R 65 0 R 69 0 R 71 0 R 73 0 R 80 0 R 83 0 R 85 0 R 91 0 R 94 0 R 96 0 R 98 0 R 105 0 R 112 0 R 117 0 R 119 0 R 135 0 R 140 0 R 148 0 R 157 0 R 165 0 R 169 0 R 178 0 R 189 0 R 193 0 R 195 0 R 199 0 R 208 0 R 217 0 R 225 0 R 234 0 R 239 0 R 248 0 R 256 0 R 265 0 R 278 0 R 285 0 R 295 0 R 303 0 R 311 0 R 318 0 R 327 0 R 333 0 R 339 0 R 346 0 R 354 0 R 362 0 R 373 0 R 386 0 R 394 0 R 401 0 R 409 0 R 417 0 R 426 0 R 436 0 R 444 0 R 450 0 R 459 0 R 471 0 R 481 0 R 488 0 R 496 0 R 503 0 R 512 0 R 520 0 R 524 0 R 530 0 R 535 0 R 539 0 R 555 0 R 566 0 R 570 0 R 585 0 R 590 0 R 595 0 R 597 0 R 599 0 R 602 0 R 604 0 R 606 0 R 614 0 R 620 0 R 623 0 R 627 0 R 636 0 R 647 0 R 655 0 R 659 0 R 663 0 R 665 0 R 675 0 R 690 0 R 697 0 R 708 0 R 718 0 R 729 0 R 741 0 R 761 0 R 771 0 R 778 0 R 782 0 R 788 0 R 791 0 R 795 0 R 799 0 R 802 0 R 805 0 R 807 0 R 810 0 R 814 0 R 816 0 R 820 0 R 826 0 R 831 0 R 
 835 0 R 838 0 R 844 0 R 846 0 R 850 0 R 858 0 R 860 0 R 863 0 R 866 0 R 869 0 R 872 0 R 886 0 R 894 0 R 905 0 R 916 0 R 922 0 R 932 0 R 943 0 R 946 0 R 950 0 R 953 0 R 958 0 R 967 0 R 975 0 R 979 0 R 983 0 R 988 0 R 992 0 R 994 0 R 1010 0 R 1021 0 R 1026 0 R 1033 0 R 1036 0 R 1044 0 R 1052 0 R 1057 0 R 1062 0 R 1067 0 R 1069 0 R 1071 0 R 1073 0 R 1083 0 R 1091 0 R 1095 0 R 1102 0 R 1109 0 R 1117 0 R 1121 0 R 1127 0 R 1132 0 R 1140 0 R 1144 0 R 1149 0 R 1151 0 R 1157 0 R 1165 0 R 1171 0 R 1178 0 R 1189 0 R 1193 0 R 1195 0 R 1197 0 R 1201 0 R 1204 0 R 1209 0 R 1212 0 R 1224 0 R 1228 0 R 1234 0 R 1242 0 R 1247 0 R 1251 0 R 1255 0 R 1257 0 R 1260 0 R 1263 0 R 1266 0 R 1270 0 R 1274 0 R 1278 0 R 1283 0 R 1287 0 R 1290 0 R 1292 0 R 1302 0 R 1304 0 R 1309 0 R 1322 0 R 1326 0 R 1332 0 R 1334 0 R 1345 0 R 1348 0 R 1354 0 R 1362 0 R 1365 0 R 1372 0 R 1379 0 R 1382 0 R 1384 0 R 1393 0 R 1395 0 R 1397 0 R 1400 0 R 1402 0 R 1404 0 R 1406 0 R 1408 0 R 1411 0 R 1415 0 R 1420 0 R 1422 0 R 1424 0 R 
 1426 0 R 1431 0 R 1438 0 R 1444 0 R 1447 0 R 1449 0 R 1452 0 R 1456 0 R 1460 0 R 1463 0 R 1465 0 R 1467 0 R 1470 0 R 1475 0 R 1481 0 R 1489 0 R 1503 0 R 1517 0 R 1520 0 R 1525 0 R 1538 0 R 1543 0 R 1558 0 R 1566 0 R 1570 0 R 1579 0 R 1594 0 R 1608 0 R 1616 0 R 1621 0 R 1632 0 R 1637 0 R 1643 0 R 1649 0 R 1661 0 R 1664 0 R 1673 0 R 1676 0 R 1685 0 R 1691 0 R 1695 0 R 1700 0 R 1712 0 R 1714 0 R 1720 0 R 1726 0 R 1729 0 R 1737 0 R 1745 0 R 1749 0 R 1751 0 R 1753 0 R 1765 0 R 1771 0 R 1780 0 R 1786 0 R 1799 0 R 1805 0 R 1811 0 R 1822 0 R 1828 0 R 1833 0 R 1837 0 R 1841 0 R 1844 0 R 1849 0 R 1854 0 R 1860 0 R 1865 0 R 1869 0 R 1878 0 R 1884 0 R 1887 0 R 1891 0 R 1900 0 R 1907 0 R 1913 0 R 1919 0 R 1923 0 R 1927 0 R 1932 0 R 1937 0 R 1943 0 R 1945 0 R 1947 0 R 1950 0 R 1961 0 R 1964 0 R 1971 0 R 1979 0 R 1984 0 R 1988 0 R 1993 0 R 1995 0 R 1998 0 R 2003 0 R 2006 0 R 2008 0 R 2011 0 R 2014 0 R 2017 0 R 2027 0 R 2032 0 R 2037 0 R 2039 0 R 2047 0 R 2054 0 R 2061 0 R 2067 0 R 2072 0 R 2074 0 
 R 2083 0 R 2093 0 R 2103 0 R 2109 0 R 2116 0 R 2118 0 R 2123 0 R 2125 0 R 2127 0 R 2131 0 R 2134 0 R 2137 0 R 2142 0 R 2146 0 R 2157 0 R 2160 0 R 2163 0 R 2167 0 R 2171 0 R 2174 0 R 2176 0 R 2181 0 R 2184 0 R 2186 0 R 2191 0 R 2201 0 R 2203 0 R 2205 0 R 2207 0 R 2209 0 R 2212 0 R 2214 0 R 2216 0 R 2219 0 R 2221 0 R 2223 0 R 2227 0 R 2232 0 R 2241 0 R 2243 0 R 2245 0 R 2251 0 R 2253 0 R 2258 0 R 2260 0 R 2262 0 R 2269 0 R 2274 0 R 2278 0 R 2283 0 R 2287 0 R 2289 0 R 2291 0 R 2295 0 R 2298 0 R 2300 0 R 2302 0 R 2306 0 R 2308 0 R 2311 0 R 2313 0 R 2315 0 R 2317 0 R 2324 0 R 2327 0 R 2332 0 R 2334 0 R 2336 0 R 2338 0 R 2340 0 R 2348 0 R 2359 0 R 2373 0 R 2384 0 R 2388 0 R 2393 0 R 2397 0 R 2400 0 R 2405 0 R 2411 0 R 2413 0 R 2416 0 R 2418 0 R 2420 0 R 2422 0 R 2427 0 R 2429 0 R 2442 0 R 2445 0 R 2453 0 R 2459 0 R 2471 0 R 2485 0 R 2498 0 R 2517 0 R 2519 0 R 2521 0 R 2525 0 R 2543 0 R 2549 0 R 2561 0 R 2565 0 R 2569 0 R 2578 0 R 2590 0 R 2595 0 R 2605 0 R 2618 0 R 2637 0 R 2646 0 R 2649 
 0 R 2658 0 R 2675 0 R 2682 0 R 2685 0 R 2690 0 R 2694 0 R 2697 0 R 2706 0 R 2715 0 R 2718 0 R 2720 0 R 2724 0 R 2738 0 R 2747 0 R 2752 0 R 2756 0 R 2759 0 R 2761 0 R 2763 0 R 2765 0 R 2770 0 R 2783 0 R 2793 0 R 2801 0 R 2807 0 R 2812 0 R 2823 0 R 2830 0 R 2836 0 R 2838 0 R 2847 0 R 2855 0 R 2857 0 R 2861 0 R 2863 0 R 2874 0 R 2880 0 R 2882 0 R 2891 0 R 2894 0 R 2904 0 R 2908 0 R 2917 0 R 2925 0 R 2930 0 R 2934 0 R 2938 0 R 2940 0 R 2946 0 R 2950 0 R 2954 0 R 2960 0 R 2966 0 R 2969 0 R 2975 0 R 2979 0 R 2988 0 R 2993 0 R 2999 0 R 3009 0 R 3016 0 R 3023 0 R 3026 0 R 3029 0 R 3036 0 R 3041 0 R 3044 0 R 3049 0 R 3059 0 R 3064 0 R 3066 0 R 3070 0 R 3075 0 R 3080 0 R 3087 0 R 3094 0 R 3101 0 R 3109 0 R 3115 0 R 3120 0 R 3124 0 R 3127 0 R 3136 0 R 3141 0 R 3145 0 R 3153 0 R 3158 0 R 3162 0 R 3164 0 R 3175 0 R 3180 0 R 3183 0 R 3185 0 R 3187 0 R 3197 0 R 3204 0 R 3208 0 R 3211 0 R 3218 0 R 3222 0 R 3225 0 R 3229 0 R 3234 0 R 3242 0 R 3247 0 R 3252 0 R 3257 0 R 3259 0 R 3262 0 R 3264 0 R 326
 8 0 R 3279 0 R 3281 0 R 3285 0 R 3288 0 R 3292 0 R 3295 0 R 3299 0 R 3301 0 R 3314 0 R 3319 0 R 3324 0 R 3330 0 R 3338 0 R 3340 0 R 3348 0 R 3366 0 R 3378 0 R 3384 0 R 3400 0 R 3404 0 R 3409 0 R 3411 0 R 3418 0 R 3421 0 R 3426 0 R 3429 0 R 3431 0 R 3433 0 R 3435 0 R 3438 0 R 3456 0 R 3459 0 R 3464 0 R 3470 0 R 3480 0 R 3485 0 R 3495 0 R 3505 0 R 3513 0 R 3518 0 R 3524 0 R 3529 0 R 3532 0 R 3540 0 R 3544 0 R 3549 0 R 3554 0 R 3567 0 R 3570 0 R 3576 0 R 3582 0 R 3587 0 R 3596 0 R 3605 0 R 3611 0 R 3620 0 R 3629 0 R 3634 0 R 3640 0 R 3646 0 R 3650 0 R 3652 0 R 3658 0 R 3665 0 R 3667 0 R 3674 0 R 3676 0 R 3683 0 R 3691 0 R 3697 0 R 3706 0 R 3712 0 R 3723 0 R 3732 0 R 3744 0 R 3757 0 R 3760 0 R 3762 0 R 3766 0 R 3779 0 R 3785 0 R 3790 0 R 3796 0 R 3800 0 R 3803 0 R 3808 0 R 3810 0 R 3814 0 R 3816 0 R 3820 0 R 3823 0 R 3826 0 R 3834 0 R 3836 0 R 3842 0 R 3845 0 R 3851 0 R 3855 0 R 3858 0 R 3861 0 R 3864 0 R 3868 0 R 3871 0 R 3876 0 R 3881 0 R 3884 0 R 3892 0 R 3896 0 R 3900 0 R 3902 0 R 3
 905 0 R 3908 0 R 3913 0 R 3920 0 R 3926 0 R 3930 0 R 3933 0 R 3940 0 R 3948 0 R 3952 0 R 3955 0 R 3957 0 R 3961 0 R 3966 0 R 3971 0 R 3974 0 R 3983 0 R 3988 0 R 3992 0 R 3995 0 R 4003 0 R 4008 0 R 4016 0 R 4021 0 R 4023 0 R 4029 0 R 4031 0 R 4036 0 R 4040 0 R 4045 0 R 4049 0 R 4061 0 R 4077 0 R 4092 0 R 4097 0 R 4100 0 R 4103 0 R 4109 0 R 4114 0 R 4116 0 R 4118 0 R 4120 0 R 4122 0 R 4124 0 R 4133 0 R 4137 0 R 4141 0 R 4145 0 R 4147 0 R 4154 0 R 4164 0 R 4173 0 R 4176 0 R 4179 0 R 4181 0 R 4188 0 R 4195 0 R 4206 0 R 4210 0 R 4213 0 R 4217 0 R 4220 0 R 4226 0 R 4229 0 R 4244 0 R 4249 0 R 4272 0 R 4276 0 R 4283 0 R 4294 0 R 4303 0 R 4306 0 R 4309 0 R 4312 0 R 4328 0 R 4333 0 R 4340 0 R 4343 0 R 4346 0 R 4353 0 R 4358 0 R 4362 0 R 4364 0 R 4371 0 R 4378 0 R 4386 0 R 4390 0 R 4395 0 R 4400 0 R 4406 0 R 4413 0 R 4420 0 R 4426 0 R 4434 0 R 4442 0 R 4446 0 R 4455 0 R 4463 0 R 4469 0 R]
+/Count 785
+/Kids [7 0 R 12 0 R 14 0 R 16 0 R 18 0 R 20 0 R 22 0 R 24 0 R 26 0 R 46 0 R 49 0 R 52 0 R 56 0 R 63 0 R 65 0 R 69 0 R 71 0 R 73 0 R 80 0 R 83 0 R 85 0 R 91 0 R 94 0 R 96 0 R 98 0 R 105 0 R 112 0 R 117 0 R 119 0 R 135 0 R 140 0 R 148 0 R 157 0 R 165 0 R 169 0 R 178 0 R 189 0 R 193 0 R 195 0 R 199 0 R 208 0 R 217 0 R 225 0 R 234 0 R 239 0 R 248 0 R 256 0 R 265 0 R 278 0 R 285 0 R 295 0 R 303 0 R 311 0 R 318 0 R 327 0 R 333 0 R 339 0 R 346 0 R 354 0 R 362 0 R 373 0 R 386 0 R 394 0 R 401 0 R 409 0 R 417 0 R 426 0 R 436 0 R 444 0 R 450 0 R 459 0 R 471 0 R 481 0 R 488 0 R 496 0 R 503 0 R 512 0 R 520 0 R 524 0 R 530 0 R 535 0 R 539 0 R 555 0 R 566 0 R 570 0 R 585 0 R 590 0 R 595 0 R 597 0 R 599 0 R 602 0 R 604 0 R 606 0 R 614 0 R 620 0 R 623 0 R 627 0 R 636 0 R 647 0 R 655 0 R 659 0 R 663 0 R 665 0 R 675 0 R 690 0 R 697 0 R 708 0 R 718 0 R 729 0 R 741 0 R 761 0 R 771 0 R 778 0 R 782 0 R 788 0 R 791 0 R 795 0 R 799 0 R 802 0 R 805 0 R 807 0 R 810 0 R 814 0 R 816 0 R 820 0 R 826 0 R 831 0 R 
 835 0 R 838 0 R 844 0 R 846 0 R 850 0 R 858 0 R 860 0 R 863 0 R 866 0 R 869 0 R 872 0 R 886 0 R 894 0 R 905 0 R 916 0 R 922 0 R 932 0 R 943 0 R 946 0 R 950 0 R 953 0 R 958 0 R 967 0 R 975 0 R 979 0 R 983 0 R 988 0 R 992 0 R 994 0 R 1010 0 R 1021 0 R 1026 0 R 1033 0 R 1036 0 R 1044 0 R 1052 0 R 1057 0 R 1062 0 R 1067 0 R 1069 0 R 1071 0 R 1073 0 R 1083 0 R 1091 0 R 1095 0 R 1102 0 R 1109 0 R 1117 0 R 1121 0 R 1127 0 R 1132 0 R 1140 0 R 1144 0 R 1149 0 R 1151 0 R 1157 0 R 1165 0 R 1171 0 R 1178 0 R 1189 0 R 1193 0 R 1195 0 R 1197 0 R 1201 0 R 1204 0 R 1209 0 R 1212 0 R 1224 0 R 1228 0 R 1234 0 R 1242 0 R 1247 0 R 1251 0 R 1255 0 R 1257 0 R 1260 0 R 1263 0 R 1266 0 R 1270 0 R 1274 0 R 1278 0 R 1283 0 R 1287 0 R 1290 0 R 1292 0 R 1302 0 R 1304 0 R 1309 0 R 1322 0 R 1326 0 R 1332 0 R 1334 0 R 1345 0 R 1348 0 R 1354 0 R 1362 0 R 1365 0 R 1372 0 R 1379 0 R 1382 0 R 1384 0 R 1393 0 R 1395 0 R 1397 0 R 1400 0 R 1402 0 R 1404 0 R 1406 0 R 1408 0 R 1411 0 R 1415 0 R 1420 0 R 1422 0 R 1424 0 R 
 1426 0 R 1431 0 R 1438 0 R 1444 0 R 1447 0 R 1449 0 R 1452 0 R 1456 0 R 1460 0 R 1463 0 R 1465 0 R 1467 0 R 1470 0 R 1475 0 R 1481 0 R 1489 0 R 1503 0 R 1517 0 R 1520 0 R 1525 0 R 1538 0 R 1543 0 R 1558 0 R 1566 0 R 1570 0 R 1579 0 R 1594 0 R 1606 0 R 1609 0 R 1623 0 R 1631 0 R 1636 0 R 1647 0 R 1652 0 R 1658 0 R 1664 0 R 1676 0 R 1679 0 R 1688 0 R 1691 0 R 1700 0 R 1705 0 R 1710 0 R 1714 0 R 1727 0 R 1729 0 R 1735 0 R 1741 0 R 1744 0 R 1752 0 R 1760 0 R 1764 0 R 1766 0 R 1768 0 R 1780 0 R 1786 0 R 1795 0 R 1802 0 R 1815 0 R 1821 0 R 1827 0 R 1838 0 R 1844 0 R 1849 0 R 1853 0 R 1857 0 R 1860 0 R 1865 0 R 1870 0 R 1876 0 R 1881 0 R 1885 0 R 1894 0 R 1900 0 R 1903 0 R 1907 0 R 1916 0 R 1923 0 R 1929 0 R 1935 0 R 1939 0 R 1943 0 R 1948 0 R 1953 0 R 1959 0 R 1961 0 R 1963 0 R 1966 0 R 1977 0 R 1980 0 R 1987 0 R 1995 0 R 2000 0 R 2003 0 R 2008 0 R 2010 0 R 2013 0 R 2018 0 R 2021 0 R 2023 0 R 2026 0 R 2029 0 R 2032 0 R 2042 0 R 2047 0 R 2052 0 R 2054 0 R 2062 0 R 2069 0 R 2076 0 R 2082 0 
 R 2087 0 R 2089 0 R 2098 0 R 2108 0 R 2118 0 R 2124 0 R 2131 0 R 2133 0 R 2138 0 R 2140 0 R 2142 0 R 2146 0 R 2149 0 R 2152 0 R 2157 0 R 2161 0 R 2172 0 R 2175 0 R 2178 0 R 2182 0 R 2186 0 R 2189 0 R 2191 0 R 2196 0 R 2199 0 R 2201 0 R 2206 0 R 2216 0 R 2218 0 R 2220 0 R 2222 0 R 2224 0 R 2227 0 R 2229 0 R 2231 0 R 2234 0 R 2236 0 R 2238 0 R 2242 0 R 2247 0 R 2256 0 R 2258 0 R 2260 0 R 2266 0 R 2268 0 R 2273 0 R 2275 0 R 2277 0 R 2284 0 R 2289 0 R 2293 0 R 2298 0 R 2302 0 R 2304 0 R 2306 0 R 2310 0 R 2313 0 R 2315 0 R 2317 0 R 2321 0 R 2323 0 R 2326 0 R 2328 0 R 2330 0 R 2332 0 R 2339 0 R 2342 0 R 2347 0 R 2349 0 R 2351 0 R 2353 0 R 2355 0 R 2363 0 R 2374 0 R 2388 0 R 2399 0 R 2403 0 R 2408 0 R 2412 0 R 2415 0 R 2420 0 R 2426 0 R 2428 0 R 2431 0 R 2433 0 R 2435 0 R 2437 0 R 2442 0 R 2444 0 R 2457 0 R 2460 0 R 2468 0 R 2474 0 R 2486 0 R 2500 0 R 2513 0 R 2532 0 R 2534 0 R 2536 0 R 2540 0 R 2558 0 R 2564 0 R 2576 0 R 2580 0 R 2584 0 R 2593 0 R 2605 0 R 2610 0 R 2620 0 R 2633 0 R 2652 
 0 R 2661 0 R 2664 0 R 2673 0 R 2690 0 R 2697 0 R 2700 0 R 2705 0 R 2709 0 R 2712 0 R 2721 0 R 2730 0 R 2733 0 R 2735 0 R 2739 0 R 2753 0 R 2762 0 R 2767 0 R 2771 0 R 2774 0 R 2776 0 R 2778 0 R 2780 0 R 2785 0 R 2798 0 R 2808 0 R 2816 0 R 2822 0 R 2827 0 R 2838 0 R 2845 0 R 2851 0 R 2853 0 R 2862 0 R 2870 0 R 2872 0 R 2876 0 R 2878 0 R 2889 0 R 2895 0 R 2897 0 R 2906 0 R 2909 0 R 2919 0 R 2923 0 R 2932 0 R 2940 0 R 2945 0 R 2949 0 R 2953 0 R 2955 0 R 2961 0 R 2965 0 R 2969 0 R 2975 0 R 2981 0 R 2984 0 R 2990 0 R 2994 0 R 3003 0 R 3008 0 R 3014 0 R 3024 0 R 3031 0 R 3038 0 R 3041 0 R 3044 0 R 3051 0 R 3056 0 R 3059 0 R 3064 0 R 3074 0 R 3079 0 R 3081 0 R 3085 0 R 3090 0 R 3095 0 R 3102 0 R 3109 0 R 3116 0 R 3124 0 R 3130 0 R 3135 0 R 3139 0 R 3142 0 R 3151 0 R 3156 0 R 3160 0 R 3168 0 R 3173 0 R 3177 0 R 3179 0 R 3190 0 R 3195 0 R 3198 0 R 3200 0 R 3202 0 R 3212 0 R 3219 0 R 3223 0 R 3226 0 R 3233 0 R 3237 0 R 3240 0 R 3244 0 R 3249 0 R 3257 0 R 3262 0 R 3267 0 R 3272 0 R 3274 0 R 327
 7 0 R 3279 0 R 3283 0 R 3294 0 R 3296 0 R 3300 0 R 3303 0 R 3307 0 R 3310 0 R 3314 0 R 3316 0 R 3329 0 R 3334 0 R 3339 0 R 3345 0 R 3353 0 R 3355 0 R 3363 0 R 3381 0 R 3393 0 R 3399 0 R 3415 0 R 3419 0 R 3424 0 R 3426 0 R 3433 0 R 3436 0 R 3441 0 R 3444 0 R 3446 0 R 3448 0 R 3450 0 R 3453 0 R 3471 0 R 3474 0 R 3479 0 R 3485 0 R 3495 0 R 3500 0 R 3510 0 R 3521 0 R 3528 0 R 3533 0 R 3539 0 R 3544 0 R 3547 0 R 3555 0 R 3559 0 R 3564 0 R 3569 0 R 3582 0 R 3585 0 R 3591 0 R 3597 0 R 3602 0 R 3612 0 R 3621 0 R 3627 0 R 3636 0 R 3645 0 R 3650 0 R 3656 0 R 3662 0 R 3667 0 R 3669 0 R 3675 0 R 3682 0 R 3684 0 R 3692 0 R 3694 0 R 3701 0 R 3709 0 R 3715 0 R 3724 0 R 3730 0 R 3741 0 R 3750 0 R 3762 0 R 3775 0 R 3778 0 R 3780 0 R 3784 0 R 3797 0 R 3803 0 R 3808 0 R 3814 0 R 3818 0 R 3821 0 R 3826 0 R 3828 0 R 3832 0 R 3834 0 R 3838 0 R 3841 0 R 3844 0 R 3852 0 R 3854 0 R 3860 0 R 3863 0 R 3869 0 R 3873 0 R 3876 0 R 3879 0 R 3882 0 R 3886 0 R 3889 0 R 3894 0 R 3899 0 R 3902 0 R 3910 0 R 3914 0 R 3
 918 0 R 3920 0 R 3923 0 R 3926 0 R 3931 0 R 3938 0 R 3944 0 R 3948 0 R 3951 0 R 3958 0 R 3966 0 R 3970 0 R 3973 0 R 3975 0 R 3979 0 R 3984 0 R 3989 0 R 3992 0 R 4001 0 R 4006 0 R 4010 0 R 4013 0 R 4021 0 R 4026 0 R 4034 0 R 4039 0 R 4041 0 R 4047 0 R 4049 0 R 4054 0 R 4058 0 R 4063 0 R 4067 0 R 4079 0 R 4095 0 R 4110 0 R 4115 0 R 4118 0 R 4121 0 R 4127 0 R 4132 0 R 4134 0 R 4136 0 R 4138 0 R 4140 0 R 4142 0 R 4151 0 R 4155 0 R 4159 0 R 4163 0 R 4165 0 R 4172 0 R 4182 0 R 4189 0 R 4192 0 R 4195 0 R 4197 0 R 4204 0 R 4211 0 R 4222 0 R 4226 0 R 4229 0 R 4233 0 R 4236 0 R 4242 0 R 4245 0 R 4260 0 R 4265 0 R 4288 0 R 4292 0 R 4299 0 R 4310 0 R 4319 0 R 4322 0 R 4325 0 R 4328 0 R 4344 0 R 4349 0 R 4356 0 R 4359 0 R 4362 0 R 4369 0 R 4374 0 R 4378 0 R 4380 0 R 4386 0 R 4393 0 R 4401 0 R 4405 0 R 4410 0 R 4415 0 R 4421 0 R 4428 0 R 4435 0 R 4441 0 R 4449 0 R 4457 0 R 4461 0 R 4470 0 R 4478 0 R 4484 0 R]
 >>
 endobj
 4 0 obj
@@ -187,11 +187,11 @@ endobj
 << /Type /Font
 /BaseFont /71be00+NotoSerif
 /Subtype /TrueType
-/FontDescriptor 5225 0 R
+/FontDescriptor 5240 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5227 0 R
-/ToUnicode 5226 0 R
+/Widths 5242 0 R
+/ToUnicode 5241 0 R
 >>
 endobj
 11 0 obj
@@ -1750,7 +1750,7 @@ endobj
 /F1.0 10 0 R
 >>
 >>
-/Annots [4470 0 R 4471 0 R 4472 0 R 4473 0 R 4474 0 R 4475 0 R 4476 0 R 4477 0 R 4478 0 R 4479 0 R 4480 0 R 4481 0 R 4482 0 R 4483 0 R 4484 0 R 4485 0 R 4486 0 R 4487 0 R 4488 0 R 4489 0 R 4490 0 R 4491 0 R 4492 0 R 4493 0 R 4494 0 R 4495 0 R 4496 0 R 4497 0 R 4498 0 R 4499 0 R 4500 0 R 4501 0 R 4502 0 R 4503 0 R 4504 0 R 4505 0 R 4506 0 R 4507 0 R 4508 0 R 4509 0 R 4510 0 R 4511 0 R 4512 0 R 4513 0 R 4514 0 R 4515 0 R 4516 0 R 4517 0 R 4518 0 R 4519 0 R 4520 0 R 4521 0 R 4522 0 R 4523 0 R 4524 0 R 4525 0 R 4526 0 R 4527 0 R 4528 0 R 4529 0 R 4530 0 R 4531 0 R 4532 0 R 4533 0 R 4534 0 R 4535 0 R 4536 0 R 4537 0 R 4538 0 R 4539 0 R 4540 0 R 4541 0 R 4542 0 R 4543 0 R 4544 0 R 4545 0 R]
+/Annots [4485 0 R 4486 0 R 4487 0 R 4488 0 R 4489 0 R 4490 0 R 4491 0 R 4492 0 R 4493 0 R 4494 0 R 4495 0 R 4496 0 R 4497 0 R 4498 0 R 4499 0 R 4500 0 R 4501 0 R 4502 0 R 4503 0 R 4504 0 R 4505 0 R 4506 0 R 4507 0 R 4508 0 R 4509 0 R 4510 0 R 4511 0 R 4512 0 R 4513 0 R 4514 0 R 4515 0 R 4516 0 R 4517 0 R 4518 0 R 4519 0 R 4520 0 R 4521 0 R 4522 0 R 4523 0 R 4524 0 R 4525 0 R 4526 0 R 4527 0 R 4528 0 R 4529 0 R 4530 0 R 4531 0 R 4532 0 R 4533 0 R 4534 0 R 4535 0 R 4536 0 R 4537 0 R 4538 0 R 4539 0 R 4540 0 R 4541 0 R 4542 0 R 4543 0 R 4544 0 R 4545 0 R 4546 0 R 4547 0 R 4548 0 R 4549 0 R 4550 0 R 4551 0 R 4552 0 R 4553 0 R 4554 0 R 4555 0 R 4556 0 R 4557 0 R 4558 0 R 4559 0 R 4560 0 R]
 >>
 endobj
 13 0 obj
@@ -3417,7 +3417,7 @@ endobj
 /Font << /F1.0 10 0 R
 >>
 >>
-/Annots [4546 0 R 4547 0 R 4548 0 R 4549 0 R 4550 0 R 4551 0 R 4552 0 R 4553 0 R 4554 0 R 4555 0 R 4556 0 R 4557 0 R 4558 0 R 4559 0 R 4560 0 R 4561 0 R 4562 0 R 4563 0 R 4564 0 R 4565 0 R 4566 0 R 4567 0 R 4568 0 R 4569 0 R 4570 0 R 4571 0 R 4572 0 R 4573 0 R 4574 0 R 4575 0 R 4576 0 R 4577 0 R 4578 0 R 4579 0 R 4580 0 R 4581 0 R 4582 0 R 4583 0 R 4584 0 R 4585 0 R 4586 0 R 4587 0 R 4588 0 R 4589 0 R 4590 0 R 4591 0 R 4592 0 R 4593 0 R 4594 0 R 4595 0 R 4596 0 R 4597 0 R 4598 0 R 4599 0 R 4600 0 R 4601 0 R 4602 0 R 4603 0 R 4604 0 R 4605 0 R 4606 0 R 4607 0 R 4608 0 R 4609 0 R 4610 0 R 4611 0 R 4612 0 R 4613 0 R 4614 0 R 4615 0 R 4616 0 R 4617 0 R 4618 0 R 4619 0 R 4620 0 R 4621 0 R 4622 0 R 4623 0 R 4624 0 R 4625 0 R 4626 0 R 4627 0 R]
+/Annots [4561 0 R 4562 0 R 4563 0 R 4564 0 R 4565 0 R 4566 0 R 4567 0 R 4568 0 R 4569 0 R 4570 0 R 4571 0 R 4572 0 R 4573 0 R 4574 0 R 4575 0 R 4576 0 R 4577 0 R 4578 0 R 4579 0 R 4580 0 R 4581 0 R 4582 0 R 4583 0 R 4584 0 R 4585 0 R 4586 0 R 4587 0 R 4588 0 R 4589 0 R 4590 0 R 4591 0 R 4592 0 R 4593 0 R 4594 0 R 4595 0 R 4596 0 R 4597 0 R 4598 0 R 4599 0 R 4600 0 R 4601 0 R 4602 0 R 4603 0 R 4604 0 R 4605 0 R 4606 0 R 4607 0 R 4608 0 R 4609 0 R 4610 0 R 4611 0 R 4612 0 R 4613 0 R 4614 0 R 4615 0 R 4616 0 R 4617 0 R 4618 0 R 4619 0 R 4620 0 R 4621 0 R 4622 0 R 4623 0 R 4624 0 R 4625 0 R 4626 0 R 4627 0 R 4628 0 R 4629 0 R 4630 0 R 4631 0 R 4632 0 R 4633 0 R 4634 0 R 4635 0 R 4636 0 R 4637 0 R 4638 0 R 4639 0 R 4640 0 R 4641 0 R 4642 0 R]
 >>
 endobj
 15 0 obj
@@ -3502,7 +3502,7 @@ ET
 BT
 529.4315 776.196 Td
 /F1.0 10.5 Tf
-<323630> Tj
+<323632> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3542,7 +3542,7 @@ ET
 BT
 529.4315 757.716 Td
 /F1.0 10.5 Tf
-<323830> Tj
+<323832> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3582,7 +3582,7 @@ ET
 BT
 529.4315 739.236 Td
 /F1.0 10.5 Tf
-<333037> Tj
+<333039> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3622,7 +3622,7 @@ ET
 BT
 529.4315 720.756 Td
 /F1.0 10.5 Tf
-<333039> Tj
+<333131> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3662,7 +3662,7 @@ ET
 BT
 529.4315 702.276 Td
 /F1.0 10.5 Tf
-<333130> Tj
+<333132> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3702,7 +3702,7 @@ ET
 BT
 529.4315 683.796 Td
 /F1.0 10.5 Tf
-<333232> Tj
+<333234> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3742,7 +3742,7 @@ ET
 BT
 529.4315 665.316 Td
 /F1.0 10.5 Tf
-<333237> Tj
+<333239> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3782,7 +3782,7 @@ ET
 BT
 529.4315 646.836 Td
 /F1.0 10.5 Tf
-<333238> Tj
+<333330> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3822,7 +3822,7 @@ ET
 BT
 529.4315 628.356 Td
 /F1.0 10.5 Tf
-<333239> Tj
+<333331> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3862,7 +3862,7 @@ ET
 BT
 529.4315 609.876 Td
 /F1.0 10.5 Tf
-<333331> Tj
+<333333> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3902,7 +3902,7 @@ ET
 BT
 529.4315 591.396 Td
 /F1.0 10.5 Tf
-<333332> Tj
+<333334> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3942,7 +3942,7 @@ ET
 BT
 529.4315 572.916 Td
 /F1.0 10.5 Tf
-<333333> Tj
+<333335> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -3982,7 +3982,7 @@ ET
 BT
 529.4315 554.436 Td
 /F1.0 10.5 Tf
-<333334> Tj
+<333336> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4022,7 +4022,7 @@ ET
 BT
 529.4315 535.956 Td
 /F1.0 10.5 Tf
-<333336> Tj
+<333338> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4062,7 +4062,7 @@ ET
 BT
 529.4315 517.476 Td
 /F1.0 10.5 Tf
-<333338> Tj
+<333430> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4102,7 +4102,7 @@ ET
 BT
 529.4315 498.996 Td
 /F1.0 10.5 Tf
-<333434> Tj
+<333436> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4142,7 +4142,7 @@ ET
 BT
 529.4315 480.516 Td
 /F1.0 10.5 Tf
-<333437> Tj
+<333439> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4182,7 +4182,7 @@ ET
 BT
 529.4315 462.036 Td
 /F1.0 10.5 Tf
-<333438> Tj
+<333530> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4222,7 +4222,7 @@ ET
 BT
 529.4315 443.556 Td
 /F1.0 10.5 Tf
-<333530> Tj
+<333532> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4262,7 +4262,7 @@ ET
 BT
 529.4315 425.076 Td
 /F1.0 10.5 Tf
-<333533> Tj
+<333535> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4302,7 +4302,7 @@ ET
 BT
 529.4315 406.596 Td
 /F1.0 10.5 Tf
-<333534> Tj
+<333536> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4342,7 +4342,7 @@ ET
 BT
 529.4315 388.116 Td
 /F1.0 10.5 Tf
-<333535> Tj
+<333537> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4382,7 +4382,7 @@ ET
 BT
 529.4315 369.636 Td
 /F1.0 10.5 Tf
-<333536> Tj
+<333538> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4422,7 +4422,7 @@ ET
 BT
 529.4315 351.156 Td
 /F1.0 10.5 Tf
-<333538> Tj
+<333630> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4462,7 +4462,7 @@ ET
 BT
 529.4315 332.676 Td
 /F1.0 10.5 Tf
-<333630> Tj
+<333632> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4502,7 +4502,7 @@ ET
 BT
 529.4315 314.196 Td
 /F1.0 10.5 Tf
-<333631> Tj
+<333633> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4542,7 +4542,7 @@ ET
 BT
 529.4315 295.716 Td
 /F1.0 10.5 Tf
-<333632> Tj
+<333634> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4582,7 +4582,7 @@ ET
 BT
 529.4315 277.236 Td
 /F1.0 10.5 Tf
-<333633> Tj
+<333635> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4622,7 +4622,7 @@ ET
 BT
 529.4315 258.756 Td
 /F1.0 10.5 Tf
-<333635> Tj
+<333637> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4662,7 +4662,7 @@ ET
 BT
 529.4315 240.276 Td
 /F1.0 10.5 Tf
-<333636> Tj
+<333638> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4702,7 +4702,7 @@ ET
 BT
 529.4315 221.796 Td
 /F1.0 10.5 Tf
-<333638> Tj
+<333730> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4742,7 +4742,7 @@ ET
 BT
 529.4315 203.316 Td
 /F1.0 10.5 Tf
-<333639> Tj
+<333731> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4782,7 +4782,7 @@ ET
 BT
 529.4315 184.836 Td
 /F1.0 10.5 Tf
-<333830> Tj
+<333832> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4822,7 +4822,7 @@ ET
 BT
 529.4315 166.356 Td
 /F1.0 10.5 Tf
-<333831> Tj
+<333833> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4862,7 +4862,7 @@ ET
 BT
 529.4315 147.876 Td
 /F1.0 10.5 Tf
-<333832> Tj
+<333834> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4902,7 +4902,7 @@ ET
 BT
 529.4315 129.396 Td
 /F1.0 10.5 Tf
-<333835> Tj
+<333837> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4942,7 +4942,7 @@ ET
 BT
 529.4315 110.916 Td
 /F1.0 10.5 Tf
-<333837> Tj
+<333839> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -4982,7 +4982,7 @@ ET
 BT
 529.4315 92.436 Td
 /F1.0 10.5 Tf
-<333930> Tj
+<333932> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5022,7 +5022,7 @@ ET
 BT
 529.4315 73.956 Td
 /F1.0 10.5 Tf
-<333931> Tj
+<333933> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5062,7 +5062,7 @@ ET
 BT
 529.4315 55.476 Td
 /F1.0 10.5 Tf
-<333937> Tj
+<333939> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5084,7 +5084,7 @@ endobj
 /Font << /F1.0 10 0 R
 >>
 >>
-/Annots [4628 0 R 4629 0 R 4630 0 R 4631 0 R 4632 0 R 4633 0 R 4634 0 R 4635 0 R 4636 0 R 4637 0 R 4638 0 R 4639 0 R 4640 0 R 4641 0 R 4642 0 R 4643 0 R 4644 0 R 4645 0 R 4646 0 R 4647 0 R 4648 0 R 4649 0 R 4650 0 R 4651 0 R 4652 0 R 4653 0 R 4654 0 R 4655 0 R 4656 0 R 4657 0 R 4658 0 R 4659 0 R 4660 0 R 4661 0 R 4662 0 R 4663 0 R 4664 0 R 4665 0 R 4666 0 R 4667 0 R 4668 0 R 4669 0 R 4670 0 R 4671 0 R 4672 0 R 4673 0 R 4674 0 R 4675 0 R 4676 0 R 4677 0 R 4678 0 R 4679 0 R 4680 0 R 4681 0 R 4682 0 R 4683 0 R 4684 0 R 4685 0 R 4686 0 R 4687 0 R 4688 0 R 4689 0 R 4690 0 R 4691 0 R 4692 0 R 4693 0 R 4694 0 R 4695 0 R 4696 0 R 4697 0 R 4698 0 R 4699 0 R 4700 0 R 4701 0 R 4702 0 R 4703 0 R 4704 0 R 4705 0 R 4706 0 R 4707 0 R 4708 0 R 4709 0 R]
+/Annots [4643 0 R 4644 0 R 4645 0 R 4646 0 R 4647 0 R 4648 0 R 4649 0 R 4650 0 R 4651 0 R 4652 0 R 4653 0 R 4654 0 R 4655 0 R 4656 0 R 4657 0 R 4658 0 R 4659 0 R 4660 0 R 4661 0 R 4662 0 R 4663 0 R 4664 0 R 4665 0 R 4666 0 R 4667 0 R 4668 0 R 4669 0 R 4670 0 R 4671 0 R 4672 0 R 4673 0 R 4674 0 R 4675 0 R 4676 0 R 4677 0 R 4678 0 R 4679 0 R 4680 0 R 4681 0 R 4682 0 R 4683 0 R 4684 0 R 4685 0 R 4686 0 R 4687 0 R 4688 0 R 4689 0 R 4690 0 R 4691 0 R 4692 0 R 4693 0 R 4694 0 R 4695 0 R 4696 0 R 4697 0 R 4698 0 R 4699 0 R 4700 0 R 4701 0 R 4702 0 R 4703 0 R 4704 0 R 4705 0 R 4706 0 R 4707 0 R 4708 0 R 4709 0 R 4710 0 R 4711 0 R 4712 0 R 4713 0 R 4714 0 R 4715 0 R 4716 0 R 4717 0 R 4718 0 R 4719 0 R 4720 0 R 4721 0 R 4722 0 R 4723 0 R 4724 0 R]
 >>
 endobj
 17 0 obj
@@ -5129,7 +5129,7 @@ ET
 BT
 529.4315 794.676 Td
 /F1.0 10.5 Tf
-<333938> Tj
+<343030> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5169,7 +5169,7 @@ ET
 BT
 529.4315 776.196 Td
 /F1.0 10.5 Tf
-<343031> Tj
+<343033> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5209,7 +5209,7 @@ ET
 BT
 529.4315 757.716 Td
 /F1.0 10.5 Tf
-<343033> Tj
+<343035> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5249,7 +5249,7 @@ ET
 BT
 529.4315 739.236 Td
 /F1.0 10.5 Tf
-<343037> Tj
+<343039> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5289,7 +5289,7 @@ ET
 BT
 529.4315 720.756 Td
 /F1.0 10.5 Tf
-<343134> Tj
+<343136> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5329,7 +5329,7 @@ ET
 BT
 529.4315 702.276 Td
 /F1.0 10.5 Tf
-<343135> Tj
+<343137> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5369,7 +5369,7 @@ ET
 BT
 529.4315 683.796 Td
 /F1.0 10.5 Tf
-<343136> Tj
+<343138> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5409,7 +5409,7 @@ ET
 BT
 529.4315 665.316 Td
 /F1.0 10.5 Tf
-<343138> Tj
+<343230> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5449,7 +5449,7 @@ ET
 BT
 529.4315 646.836 Td
 /F1.0 10.5 Tf
-<343233> Tj
+<343235> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5489,7 +5489,7 @@ ET
 BT
 529.4315 628.356 Td
 /F1.0 10.5 Tf
-<343239> Tj
+<343331> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5529,7 +5529,7 @@ ET
 BT
 529.4315 609.876 Td
 /F1.0 10.5 Tf
-<343331> Tj
+<343333> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5569,7 +5569,7 @@ ET
 BT
 529.4315 591.396 Td
 /F1.0 10.5 Tf
-<343332> Tj
+<343334> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5609,7 +5609,7 @@ ET
 BT
 529.4315 572.916 Td
 /F1.0 10.5 Tf
-<343333> Tj
+<343335> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5649,7 +5649,7 @@ ET
 BT
 529.4315 554.436 Td
 /F1.0 10.5 Tf
-<343334> Tj
+<343336> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5689,7 +5689,7 @@ ET
 BT
 529.4315 535.956 Td
 /F1.0 10.5 Tf
-<343336> Tj
+<343338> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5729,7 +5729,7 @@ ET
 BT
 529.4315 517.476 Td
 /F1.0 10.5 Tf
-<343337> Tj
+<343339> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5769,7 +5769,7 @@ ET
 BT
 529.4315 498.996 Td
 /F1.0 10.5 Tf
-<343431> Tj
+<343433> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5809,7 +5809,7 @@ ET
 BT
 529.4315 480.516 Td
 /F1.0 10.5 Tf
-<343432> Tj
+<343434> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5849,7 +5849,7 @@ ET
 BT
 529.4315 462.036 Td
 /F1.0 10.5 Tf
-<343436> Tj
+<343438> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5889,7 +5889,7 @@ ET
 BT
 529.4315 443.556 Td
 /F1.0 10.5 Tf
-<343437> Tj
+<343439> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5929,7 +5929,7 @@ ET
 BT
 529.4315 425.076 Td
 /F1.0 10.5 Tf
-<343530> Tj
+<343532> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -5969,7 +5969,7 @@ ET
 BT
 529.4315 406.596 Td
 /F1.0 10.5 Tf
-<343535> Tj
+<343537> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6009,7 +6009,7 @@ ET
 BT
 529.4315 388.116 Td
 /F1.0 10.5 Tf
-<343536> Tj
+<343538> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6049,7 +6049,7 @@ ET
 BT
 529.4315 369.636 Td
 /F1.0 10.5 Tf
-<343538> Tj
+<343630> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6089,7 +6089,7 @@ ET
 BT
 529.4315 351.156 Td
 /F1.0 10.5 Tf
-<343539> Tj
+<343631> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6129,7 +6129,7 @@ ET
 BT
 529.4315 332.676 Td
 /F1.0 10.5 Tf
-<343630> Tj
+<343632> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6169,7 +6169,7 @@ ET
 BT
 529.4315 314.196 Td
 /F1.0 10.5 Tf
-<343631> Tj
+<343633> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6209,7 +6209,7 @@ ET
 BT
 529.4315 295.716 Td
 /F1.0 10.5 Tf
-<343632> Tj
+<343634> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6249,7 +6249,7 @@ ET
 BT
 529.4315 277.236 Td
 /F1.0 10.5 Tf
-<343633> Tj
+<343635> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6289,7 +6289,7 @@ ET
 BT
 529.4315 258.756 Td
 /F1.0 10.5 Tf
-<343637> Tj
+<343639> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6329,7 +6329,7 @@ ET
 BT
 529.4315 240.276 Td
 /F1.0 10.5 Tf
-<343638> Tj
+<343730> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6369,7 +6369,7 @@ ET
 BT
 529.4315 221.796 Td
 /F1.0 10.5 Tf
-<343736> Tj
+<343738> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6409,7 +6409,7 @@ ET
 BT
 529.4315 203.316 Td
 /F1.0 10.5 Tf
-<343830> Tj
+<343832> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6449,7 +6449,7 @@ ET
 BT
 529.4315 184.836 Td
 /F1.0 10.5 Tf
-<343832> Tj
+<343834> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6489,7 +6489,7 @@ ET
 BT
 529.4315 166.356 Td
 /F1.0 10.5 Tf
-<343835> Tj
+<343837> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6529,7 +6529,7 @@ ET
 BT
 529.4315 147.876 Td
 /F1.0 10.5 Tf
-<343836> Tj
+<343838> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6569,7 +6569,7 @@ ET
 BT
 529.4315 129.396 Td
 /F1.0 10.5 Tf
-<343935> Tj
+<343937> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6609,7 +6609,7 @@ ET
 BT
 529.4315 110.916 Td
 /F1.0 10.5 Tf
-<343937> Tj
+<343939> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6649,7 +6649,7 @@ ET
 BT
 529.4315 92.436 Td
 /F1.0 10.5 Tf
-<343938> Tj
+<353030> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6689,7 +6689,7 @@ ET
 BT
 529.4315 73.956 Td
 /F1.0 10.5 Tf
-<343939> Tj
+<353031> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6729,7 +6729,7 @@ ET
 BT
 529.4315 55.476 Td
 /F1.0 10.5 Tf
-<353030> Tj
+<353032> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6751,7 +6751,7 @@ endobj
 /Font << /F1.0 10 0 R
 >>
 >>
-/Annots [4710 0 R 4711 0 R 4712 0 R 4713 0 R 4714 0 R 4715 0 R 4716 0 R 4717 0 R 4718 0 R 4719 0 R 4720 0 R 4721 0 R 4722 0 R 4723 0 R 4724 0 R 4725 0 R 4726 0 R 4727 0 R 4728 0 R 4729 0 R 4730 0 R 4731 0 R 4732 0 R 4733 0 R 4734 0 R 4735 0 R 4736 0 R 4737 0 R 4738 0 R 4739 0 R 4740 0 R 4741 0 R 4742 0 R 4743 0 R 4744 0 R 4745 0 R 4746 0 R 4747 0 R 4748 0 R 4749 0 R 4750 0 R 4751 0 R 4752 0 R 4753 0 R 4754 0 R 4755 0 R 4756 0 R 4757 0 R 4758 0 R 4759 0 R 4760 0 R 4761 0 R 4762 0 R 4763 0 R 4764 0 R 4765 0 R 4766 0 R 4767 0 R 4768 0 R 4769 0 R 4770 0 R 4771 0 R 4772 0 R 4773 0 R 4774 0 R 4775 0 R 4776 0 R 4777 0 R 4778 0 R 4779 0 R 4780 0 R 4781 0 R 4782 0 R 4783 0 R 4784 0 R 4785 0 R 4786 0 R 4787 0 R 4788 0 R 4789 0 R 4790 0 R 4791 0 R]
+/Annots [4725 0 R 4726 0 R 4727 0 R 4728 0 R 4729 0 R 4730 0 R 4731 0 R 4732 0 R 4733 0 R 4734 0 R 4735 0 R 4736 0 R 4737 0 R 4738 0 R 4739 0 R 4740 0 R 4741 0 R 4742 0 R 4743 0 R 4744 0 R 4745 0 R 4746 0 R 4747 0 R 4748 0 R 4749 0 R 4750 0 R 4751 0 R 4752 0 R 4753 0 R 4754 0 R 4755 0 R 4756 0 R 4757 0 R 4758 0 R 4759 0 R 4760 0 R 4761 0 R 4762 0 R 4763 0 R 4764 0 R 4765 0 R 4766 0 R 4767 0 R 4768 0 R 4769 0 R 4770 0 R 4771 0 R 4772 0 R 4773 0 R 4774 0 R 4775 0 R 4776 0 R 4777 0 R 4778 0 R 4779 0 R 4780 0 R 4781 0 R 4782 0 R 4783 0 R 4784 0 R 4785 0 R 4786 0 R 4787 0 R 4788 0 R 4789 0 R 4790 0 R 4791 0 R 4792 0 R 4793 0 R 4794 0 R 4795 0 R 4796 0 R 4797 0 R 4798 0 R 4799 0 R 4800 0 R 4801 0 R 4802 0 R 4803 0 R 4804 0 R 4805 0 R 4806 0 R]
 >>
 endobj
 19 0 obj
@@ -6796,7 +6796,7 @@ ET
 BT
 529.4315 794.676 Td
 /F1.0 10.5 Tf
-<353033> Tj
+<353035> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6836,7 +6836,7 @@ ET
 BT
 529.4315 776.196 Td
 /F1.0 10.5 Tf
-<353034> Tj
+<353036> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6876,7 +6876,7 @@ ET
 BT
 529.4315 757.716 Td
 /F1.0 10.5 Tf
-<353035> Tj
+<353037> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6916,7 +6916,7 @@ ET
 BT
 529.4315 739.236 Td
 /F1.0 10.5 Tf
-<353036> Tj
+<353038> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6956,7 +6956,7 @@ ET
 BT
 529.4315 720.756 Td
 /F1.0 10.5 Tf
-<353037> Tj
+<353039> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -6996,7 +6996,7 @@ ET
 BT
 529.4315 702.276 Td
 /F1.0 10.5 Tf
-<353038> Tj
+<353130> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7036,7 +7036,7 @@ ET
 BT
 529.4315 683.796 Td
 /F1.0 10.5 Tf
-<353039> Tj
+<353131> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7076,7 +7076,7 @@ ET
 BT
 529.4315 665.316 Td
 /F1.0 10.5 Tf
-<353130> Tj
+<353132> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7116,7 +7116,7 @@ ET
 BT
 529.4315 646.836 Td
 /F1.0 10.5 Tf
-<353131> Tj
+<353133> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7156,7 +7156,7 @@ ET
 BT
 529.4315 628.356 Td
 /F1.0 10.5 Tf
-<353135> Tj
+<353137> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7196,7 +7196,7 @@ ET
 BT
 529.4315 609.876 Td
 /F1.0 10.5 Tf
-<353136> Tj
+<353138> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7236,7 +7236,7 @@ ET
 BT
 529.4315 591.396 Td
 /F1.0 10.5 Tf
-<353337> Tj
+<353339> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7276,7 +7276,7 @@ ET
 BT
 529.4315 572.916 Td
 /F1.0 10.5 Tf
-<353338> Tj
+<353430> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7316,7 +7316,7 @@ ET
 BT
 529.4315 554.436 Td
 /F1.0 10.5 Tf
-<353434> Tj
+<353436> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7356,7 +7356,7 @@ ET
 BT
 529.4315 535.956 Td
 /F1.0 10.5 Tf
-<353439> Tj
+<353531> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7396,7 +7396,7 @@ ET
 BT
 529.4315 517.476 Td
 /F1.0 10.5 Tf
-<353533> Tj
+<353535> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7436,7 +7436,7 @@ ET
 BT
 529.4315 498.996 Td
 /F1.0 10.5 Tf
-<353636> Tj
+<353638> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7476,7 +7476,7 @@ ET
 BT
 529.4315 480.516 Td
 /F1.0 10.5 Tf
-<353734> Tj
+<353736> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7516,7 +7516,7 @@ ET
 BT
 529.4315 462.036 Td
 /F1.0 10.5 Tf
-<353736> Tj
+<353738> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7556,7 +7556,7 @@ ET
 BT
 529.4315 443.556 Td
 /F1.0 10.5 Tf
-<353830> Tj
+<353832> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7596,7 +7596,7 @@ ET
 BT
 529.4315 425.076 Td
 /F1.0 10.5 Tf
-<353831> Tj
+<353833> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7636,7 +7636,7 @@ ET
 BT
 529.4315 406.596 Td
 /F1.0 10.5 Tf
-<353835> Tj
+<353837> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7676,7 +7676,7 @@ ET
 BT
 529.4315 388.116 Td
 /F1.0 10.5 Tf
-<353836> Tj
+<353838> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7716,7 +7716,7 @@ ET
 BT
 529.4315 369.636 Td
 /F1.0 10.5 Tf
-<353930> Tj
+<353932> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7756,7 +7756,7 @@ ET
 BT
 529.4315 351.156 Td
 /F1.0 10.5 Tf
-<353935> Tj
+<353937> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7796,7 +7796,7 @@ ET
 BT
 529.4315 332.676 Td
 /F1.0 10.5 Tf
-<353936> Tj
+<353938> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7836,7 +7836,7 @@ ET
 BT
 529.4315 314.196 Td
 /F1.0 10.5 Tf
-<353939> Tj
+<363031> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7876,7 +7876,7 @@ ET
 BT
 529.4315 295.716 Td
 /F1.0 10.5 Tf
-<363030> Tj
+<363032> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7916,7 +7916,7 @@ ET
 BT
 529.4315 277.236 Td
 /F1.0 10.5 Tf
-<363033> Tj
+<363035> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7956,7 +7956,7 @@ ET
 BT
 529.4315 258.756 Td
 /F1.0 10.5 Tf
-<363037> Tj
+<363039> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -7996,7 +7996,7 @@ ET
 BT
 529.4315 240.276 Td
 /F1.0 10.5 Tf
-<363135> Tj
+<363137> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8036,7 +8036,7 @@ ET
 BT
 529.4315 221.796 Td
 /F1.0 10.5 Tf
-<363136> Tj
+<363138> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8076,7 +8076,7 @@ ET
 BT
 529.4315 203.316 Td
 /F1.0 10.5 Tf
-<363137> Tj
+<363139> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8127,7 +8127,7 @@ ET
 BT
 529.4315 184.836 Td
 /F1.0 10.5 Tf
-<363138> Tj
+<363230> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8167,7 +8167,7 @@ ET
 BT
 529.4315 166.356 Td
 /F1.0 10.5 Tf
-<363139> Tj
+<363231> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8207,7 +8207,7 @@ ET
 BT
 529.4315 147.876 Td
 /F1.0 10.5 Tf
-<363333> Tj
+<363335> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8247,7 +8247,7 @@ ET
 BT
 529.4315 129.396 Td
 /F1.0 10.5 Tf
-<363438> Tj
+<363530> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8287,7 +8287,7 @@ ET
 BT
 529.4315 110.916 Td
 /F1.0 10.5 Tf
-<363439> Tj
+<363531> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8327,7 +8327,7 @@ ET
 BT
 529.4315 92.436 Td
 /F1.0 10.5 Tf
-<363531> Tj
+<363533> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8367,7 +8367,7 @@ ET
 BT
 529.4315 73.956 Td
 /F1.0 10.5 Tf
-<363533> Tj
+<363535> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8407,7 +8407,7 @@ ET
 BT
 529.4315 55.476 Td
 /F1.0 10.5 Tf
-<363535> Tj
+<363537> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8429,7 +8429,7 @@ endobj
 /Font << /F1.0 10 0 R
 >>
 >>
-/Annots [4792 0 R 4793 0 R 4794 0 R 4795 0 R 4796 0 R 4797 0 R 4798 0 R 4799 0 R 4800 0 R 4801 0 R 4802 0 R 4803 0 R 4804 0 R 4805 0 R 4806 0 R 4807 0 R 4808 0 R 4809 0 R 4810 0 R 4811 0 R 4812 0 R 4813 0 R 4814 0 R 4815 0 R 4816 0 R 4817 0 R 4818 0 R 4819 0 R 4820 0 R 4821 0 R 4822 0 R 4823 0 R 4824 0 R 4825 0 R 4826 0 R 4827 0 R 4828 0 R 4829 0 R 4830 0 R 4831 0 R 4832 0 R 4833 0 R 4834 0 R 4835 0 R 4836 0 R 4837 0 R 4838 0 R 4839 0 R 4840 0 R 4841 0 R 4842 0 R 4843 0 R 4844 0 R 4845 0 R 4846 0 R 4847 0 R 4848 0 R 4849 0 R 4850 0 R 4851 0 R 4852 0 R 4853 0 R 4854 0 R 4855 0 R 4856 0 R 4857 0 R 4858 0 R 4859 0 R 4860 0 R 4861 0 R 4862 0 R 4863 0 R 4864 0 R 4865 0 R 4866 0 R 4867 0 R 4868 0 R 4869 0 R 4870 0 R 4871 0 R 4872 0 R 4873 0 R 4874 0 R 4875 0 R]
+/Annots [4807 0 R 4808 0 R 4809 0 R 4810 0 R 4811 0 R 4812 0 R 4813 0 R 4814 0 R 4815 0 R 4816 0 R 4817 0 R 4818 0 R 4819 0 R 4820 0 R 4821 0 R 4822 0 R 4823 0 R 4824 0 R 4825 0 R 4826 0 R 4827 0 R 4828 0 R 4829 0 R 4830 0 R 4831 0 R 4832 0 R 4833 0 R 4834 0 R 4835 0 R 4836 0 R 4837 0 R 4838 0 R 4839 0 R 4840 0 R 4841 0 R 4842 0 R 4843 0 R 4844 0 R 4845 0 R 4846 0 R 4847 0 R 4848 0 R 4849 0 R 4850 0 R 4851 0 R 4852 0 R 4853 0 R 4854 0 R 4855 0 R 4856 0 R 4857 0 R 4858 0 R 4859 0 R 4860 0 R 4861 0 R 4862 0 R 4863 0 R 4864 0 R 4865 0 R 4866 0 R 4867 0 R 4868 0 R 4869 0 R 4870 0 R 4871 0 R 4872 0 R 4873 0 R 4874 0 R 4875 0 R 4876 0 R 4877 0 R 4878 0 R 4879 0 R 4880 0 R 4881 0 R 4882 0 R 4883 0 R 4884 0 R 4885 0 R 4886 0 R 4887 0 R 4888 0 R 4889 0 R 4890 0 R]
 >>
 endobj
 21 0 obj
@@ -8474,7 +8474,7 @@ ET
 BT
 529.4315 794.676 Td
 /F1.0 10.5 Tf
-<363537> Tj
+<363539> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8514,7 +8514,7 @@ ET
 BT
 529.4315 776.196 Td
 /F1.0 10.5 Tf
-<363538> Tj
+<363630> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8565,7 +8565,7 @@ ET
 BT
 529.4315 757.716 Td
 /F1.0 10.5 Tf
-<363630> Tj
+<363632> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8605,7 +8605,7 @@ ET
 BT
 529.4315 739.236 Td
 /F1.0 10.5 Tf
-<363631> Tj
+<363633> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8645,7 +8645,7 @@ ET
 BT
 529.4315 720.756 Td
 /F1.0 10.5 Tf
-<363634> Tj
+<363636> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8685,7 +8685,7 @@ ET
 BT
 529.4315 702.276 Td
 /F1.0 10.5 Tf
-<363635> Tj
+<363637> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8725,7 +8725,7 @@ ET
 BT
 529.4315 683.796 Td
 /F1.0 10.5 Tf
-<363636> Tj
+<363638> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8765,7 +8765,7 @@ ET
 BT
 529.4315 665.316 Td
 /F1.0 10.5 Tf
-<363637> Tj
+<363639> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8805,7 +8805,7 @@ ET
 BT
 529.4315 646.836 Td
 /F1.0 10.5 Tf
-<363638> Tj
+<363730> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8845,7 +8845,7 @@ ET
 BT
 529.4315 628.356 Td
 /F1.0 10.5 Tf
-<363639> Tj
+<363731> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8885,7 +8885,7 @@ ET
 BT
 529.4315 609.876 Td
 /F1.0 10.5 Tf
-<363730> Tj
+<363732> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8925,7 +8925,7 @@ ET
 BT
 529.4315 591.396 Td
 /F1.0 10.5 Tf
-<363731> Tj
+<363733> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -8965,7 +8965,7 @@ ET
 BT
 529.4315 572.916 Td
 /F1.0 10.5 Tf
-<363732> Tj
+<363734> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9005,7 +9005,7 @@ ET
 BT
 529.4315 554.436 Td
 /F1.0 10.5 Tf
-<363733> Tj
+<363735> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9045,7 +9045,7 @@ ET
 BT
 529.4315 535.956 Td
 /F1.0 10.5 Tf
-<363734> Tj
+<363736> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9085,7 +9085,7 @@ ET
 BT
 529.4315 517.476 Td
 /F1.0 10.5 Tf
-<363736> Tj
+<363738> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9125,7 +9125,7 @@ ET
 BT
 529.4315 498.996 Td
 /F1.0 10.5 Tf
-<363737> Tj
+<363739> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9165,7 +9165,7 @@ ET
 BT
 529.4315 480.516 Td
 /F1.0 10.5 Tf
-<363738> Tj
+<363830> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9205,7 +9205,7 @@ ET
 BT
 529.4315 462.036 Td
 /F1.0 10.5 Tf
-<363739> Tj
+<363831> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9245,7 +9245,7 @@ ET
 BT
 529.4315 443.556 Td
 /F1.0 10.5 Tf
-<363830> Tj
+<363832> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9285,7 +9285,7 @@ ET
 BT
 529.4315 425.076 Td
 /F1.0 10.5 Tf
-<363831> Tj
+<363833> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9325,7 +9325,7 @@ ET
 BT
 529.4315 406.596 Td
 /F1.0 10.5 Tf
-<363833> Tj
+<363835> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9365,7 +9365,7 @@ ET
 BT
 529.4315 388.116 Td
 /F1.0 10.5 Tf
-<363834> Tj
+<363836> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9405,7 +9405,7 @@ ET
 BT
 529.4315 369.636 Td
 /F1.0 10.5 Tf
-<363931> Tj
+<363933> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9445,7 +9445,7 @@ ET
 BT
 529.4315 351.156 Td
 /F1.0 10.5 Tf
-<363932> Tj
+<363934> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9485,7 +9485,7 @@ ET
 BT
 529.4315 332.676 Td
 /F1.0 10.5 Tf
-<363933> Tj
+<363935> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9525,7 +9525,7 @@ ET
 BT
 529.4315 314.196 Td
 /F1.0 10.5 Tf
-<363934> Tj
+<363936> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9565,7 +9565,7 @@ ET
 BT
 529.4315 295.716 Td
 /F1.0 10.5 Tf
-<363935> Tj
+<363937> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9605,7 +9605,7 @@ ET
 BT
 529.4315 277.236 Td
 /F1.0 10.5 Tf
-<363936> Tj
+<363938> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9645,7 +9645,7 @@ ET
 BT
 529.4315 258.756 Td
 /F1.0 10.5 Tf
-<373037> Tj
+<373039> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9685,7 +9685,7 @@ ET
 BT
 529.4315 240.276 Td
 /F1.0 10.5 Tf
-<373130> Tj
+<373132> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9725,7 +9725,7 @@ ET
 BT
 529.4315 221.796 Td
 /F1.0 10.5 Tf
-<373134> Tj
+<373136> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9765,7 +9765,7 @@ ET
 BT
 529.4315 203.316 Td
 /F1.0 10.5 Tf
-<373230> Tj
+<373232> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9805,7 +9805,7 @@ ET
 BT
 529.4315 184.836 Td
 /F1.0 10.5 Tf
-<373331> Tj
+<373333> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9845,7 +9845,7 @@ ET
 BT
 529.4315 166.356 Td
 /F1.0 10.5 Tf
-<373332> Tj
+<373334> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9885,7 +9885,7 @@ ET
 BT
 529.4315 147.876 Td
 /F1.0 10.5 Tf
-<373333> Tj
+<373335> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9925,7 +9925,7 @@ ET
 BT
 529.4315 129.396 Td
 /F1.0 10.5 Tf
-<373432> Tj
+<373434> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -9965,7 +9965,7 @@ ET
 BT
 529.4315 110.916 Td
 /F1.0 10.5 Tf
-<373433> Tj
+<373435> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10005,7 +10005,7 @@ ET
 BT
 529.4315 92.436 Td
 /F1.0 10.5 Tf
-<373434> Tj
+<373436> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10045,7 +10045,7 @@ ET
 BT
 529.4315 73.956 Td
 /F1.0 10.5 Tf
-<373435> Tj
+<373437> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10085,7 +10085,7 @@ ET
 BT
 529.4315 55.476 Td
 /F1.0 10.5 Tf
-<373436> Tj
+<373438> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10107,7 +10107,7 @@ endobj
 /Font << /F1.0 10 0 R
 >>
 >>
-/Annots [4876 0 R 4877 0 R 4878 0 R 4879 0 R 4880 0 R 4881 0 R 4882 0 R 4883 0 R 4884 0 R 4885 0 R 4886 0 R 4887 0 R 4888 0 R 4889 0 R 4890 0 R 4891 0 R 4892 0 R 4893 0 R 4894 0 R 4895 0 R 4896 0 R 4897 0 R 4898 0 R 4899 0 R 4900 0 R 4901 0 R 4902 0 R 4903 0 R 4904 0 R 4905 0 R 4906 0 R 4907 0 R 4908 0 R 4909 0 R 4910 0 R 4911 0 R 4912 0 R 4913 0 R 4914 0 R 4915 0 R 4916 0 R 4917 0 R 4918 0 R 4919 0 R 4920 0 R 4921 0 R 4922 0 R 4923 0 R 4924 0 R 4925 0 R 4926 0 R 4927 0 R 4928 0 R 4929 0 R 4930 0 R 4931 0 R 4932 0 R 4933 0 R 4934 0 R 4935 0 R 4936 0 R 4937 0 R 4938 0 R 4939 0 R 4940 0 R 4941 0 R 4942 0 R 4943 0 R 4944 0 R 4945 0 R 4946 0 R 4947 0 R 4948 0 R 4949 0 R 4950 0 R 4951 0 R 4952 0 R 4953 0 R 4954 0 R 4955 0 R 4956 0 R 4957 0 R 4958 0 R 4959 0 R]
+/Annots [4891 0 R 4892 0 R 4893 0 R 4894 0 R 4895 0 R 4896 0 R 4897 0 R 4898 0 R 4899 0 R 4900 0 R 4901 0 R 4902 0 R 4903 0 R 4904 0 R 4905 0 R 4906 0 R 4907 0 R 4908 0 R 4909 0 R 4910 0 R 4911 0 R 4912 0 R 4913 0 R 4914 0 R 4915 0 R 4916 0 R 4917 0 R 4918 0 R 4919 0 R 4920 0 R 4921 0 R 4922 0 R 4923 0 R 4924 0 R 4925 0 R 4926 0 R 4927 0 R 4928 0 R 4929 0 R 4930 0 R 4931 0 R 4932 0 R 4933 0 R 4934 0 R 4935 0 R 4936 0 R 4937 0 R 4938 0 R 4939 0 R 4940 0 R 4941 0 R 4942 0 R 4943 0 R 4944 0 R 4945 0 R 4946 0 R 4947 0 R 4948 0 R 4949 0 R 4950 0 R 4951 0 R 4952 0 R 4953 0 R 4954 0 R 4955 0 R 4956 0 R 4957 0 R 4958 0 R 4959 0 R 4960 0 R 4961 0 R 4962 0 R 4963 0 R 4964 0 R 4965 0 R 4966 0 R 4967 0 R 4968 0 R 4969 0 R 4970 0 R 4971 0 R 4972 0 R 4973 0 R 4974 0 R]
 >>
 endobj
 23 0 obj
@@ -10152,7 +10152,7 @@ ET
 BT
 529.4315 794.676 Td
 /F1.0 10.5 Tf
-<373438> Tj
+<373530> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10192,7 +10192,7 @@ ET
 BT
 529.4315 776.196 Td
 /F1.0 10.5 Tf
-<373439> Tj
+<373531> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10232,7 +10232,7 @@ ET
 BT
 529.4315 757.716 Td
 /F1.0 10.5 Tf
-<373530> Tj
+<373532> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10272,7 +10272,7 @@ ET
 BT
 529.4315 739.236 Td
 /F1.0 10.5 Tf
-<373534> Tj
+<373536> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10312,7 +10312,7 @@ ET
 BT
 529.4315 720.756 Td
 /F1.0 10.5 Tf
-<373535> Tj
+<373537> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -10334,7 +10334,7 @@ endobj
 /Font << /F1.0 10 0 R
 >>
 >>
-/Annots [4960 0 R 4961 0 R 4962 0 R 4963 0 R 4964 0 R 4965 0 R 4966 0 R 4967 0 R 4968 0 R 4969 0 R]
+/Annots [4975 0 R 4976 0 R 4977 0 R 4978 0 R 4979 0 R 4980 0 R 4981 0 R 4982 0 R 4983 0 R 4984 0 R]
 >>
 endobj
 25 0 obj
@@ -11146,7 +11146,7 @@ endobj
 /F4.0 37 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [32 0 R 33 0 R 34 0 R 36 0 R 38 0 R 39 0 R 41 0 R 42 0 R 43 0 R]
@@ -11161,7 +11161,7 @@ endobj
 >>
 endobj
 29 0 obj
-<< /Kids [650 0 R 3717 0 R 1954 0 R 651 0 R 4170 0 R 1181 0 R 2585 0 R 3943 0 R]
+<< /Kids [650 0 R 3735 0 R 1970 0 R 651 0 R 3665 0 R 1181 0 R 2600 0 R 3961 0 R]
 >>
 endobj
 30 0 obj
@@ -11171,11 +11171,11 @@ endobj
 << /Type /Font
 /BaseFont /309344+NotoSerif-Bold
 /Subtype /TrueType
-/FontDescriptor 5229 0 R
+/FontDescriptor 5244 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5231 0 R
-/ToUnicode 5230 0 R
+/Widths 5246 0 R
+/ToUnicode 5245 0 R
 >>
 endobj
 32 0 obj
@@ -11215,11 +11215,11 @@ endobj
 << /Type /Font
 /BaseFont /fbe45d+NotoSerif-Italic
 /Subtype /TrueType
-/FontDescriptor 5233 0 R
+/FontDescriptor 5248 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5235 0 R
-/ToUnicode 5234 0 R
+/Widths 5250 0 R
+/ToUnicode 5249 0 R
 >>
 endobj
 36 0 obj
@@ -11237,11 +11237,11 @@ endobj
 << /Type /Font
 /BaseFont /c7d210+mplus1mn-regular
 /Subtype /TrueType
-/FontDescriptor 5237 0 R
+/FontDescriptor 5252 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5239 0 R
-/ToUnicode 5238 0 R
+/Widths 5254 0 R
+/ToUnicode 5253 0 R
 >>
 endobj
 38 0 obj
@@ -11267,11 +11267,11 @@ endobj
 << /Type /Font
 /BaseFont /6bc580+NotoSerif
 /Subtype /TrueType
-/FontDescriptor 5241 0 R
+/FontDescriptor 5256 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5243 0 R
-/ToUnicode 5242 0 R
+/Widths 5258 0 R
+/ToUnicode 5257 0 R
 >>
 endobj
 41 0 obj
@@ -11705,7 +11705,7 @@ endobj
 /F5.1 47 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -11714,11 +11714,11 @@ endobj
 << /Type /Font
 /BaseFont /26ec65+FontAwesome
 /Subtype /TrueType
-/FontDescriptor 5245 0 R
+/FontDescriptor 5260 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5247 0 R
-/ToUnicode 5246 0 R
+/Widths 5262 0 R
+/ToUnicode 5261 0 R
 >>
 endobj
 48 0 obj
@@ -11776,7 +11776,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -11861,7 +11861,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [54 0 R]
@@ -13434,7 +13434,7 @@ endobj
 /F1.1 40 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [59 0 R 61 0 R]
@@ -14554,7 +14554,7 @@ endobj
 /F1.0 10 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -15433,7 +15433,7 @@ endobj
 /F3.0 35 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [66 0 R]
@@ -16223,7 +16223,7 @@ endobj
 /Font << /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -16942,7 +16942,7 @@ endobj
 /Font << /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -17818,7 +17818,7 @@ endobj
 /F2.0 31 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [75 0 R 76 0 R 77 0 R 78 0 R]
@@ -18808,7 +18808,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [81 0 R]
@@ -19731,7 +19731,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -21242,7 +21242,7 @@ endobj
 /F4.0 37 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [86 0 R 88 0 R]
@@ -22590,7 +22590,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [92 0 R]
@@ -23664,7 +23664,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -24387,7 +24387,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -25026,7 +25026,7 @@ endobj
 /F4.0 37 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [99 0 R 100 0 R 101 0 R 103 0 R]
@@ -25270,7 +25270,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [107 0 R 108 0 R 109 0 R 110 0 R]
@@ -26142,7 +26142,7 @@ endobj
 /F4.0 37 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [114 0 R 115 0 R]
@@ -26422,7 +26422,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -27902,7 +27902,7 @@ endobj
 /F5.1 47 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [122 0 R 123 0 R 124 0 R 125 0 R 126 0 R 127 0 R 128 0 R 130 0 R 131 0 R 132 0 R 133 0 R]
@@ -28942,7 +28942,7 @@ endobj
 /F2.0 31 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [136 0 R 138 0 R]
@@ -29695,7 +29695,7 @@ endobj
 /F2.0 31 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [141 0 R 143 0 R 144 0 R 145 0 R 146 0 R]
@@ -33665,7 +33665,7 @@ endobj
 /F2.0 31 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [149 0 R 150 0 R 151 0 R 152 0 R 153 0 R 154 0 R 155 0 R]
@@ -34622,7 +34622,7 @@ endobj
 /F2.0 31 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [158 0 R 159 0 R 163 0 R]
@@ -34655,12 +34655,12 @@ endobj
 endobj
 161 0 obj
 << /Limits [(__anchor-top) (adding.new.node)]
-/Names [(__anchor-top) 27 0 R (__indexterm-2002) 3614 0 R (__indexterm-2004) 3616 0 R (__indexterm-2006) 3618 0 R (__indexterm-2008) 3621 0 R (accesscontrolclient-changes) 4467 0 R (acid) 954 0 R (acl) 3419 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3715 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3718 0 R (add.metrics) 3713 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 4046 0 R (adding.new.node) 3160 0 R]
+/Names [(__anchor-top) 27 0 R (__indexterm-2002) 3630 0 R (__indexterm-2004) 3632 0 R (__indexterm-2006) 3634 0 R (__indexterm-2008) 3637 0 R (accesscontrolclient-changes) 4482 0 R (acid) 954 0 R (acl) 3434 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3733 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3736 0 R (add.metrics) 3731 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 4064 0 R (adding.new.node) 3175 0 R]
 >>
 endobj
 162 0 obj
 << /Limits [(io.storefile.bloom.block.size) (jdk-version-requirements)]
-/Names [(io.storefile.bloom.block.size) 365 0 R (ipc) 4375 0 R (irbrc) 817 0 R (irc) 3450 0 R (isolate-system-tables) 3413 0 R (java) 121 0 R (java-2) 1957 0 R (java-3) 1962 0 R (java.client.config) 525 0 R (jdk-issues) 2961 0 R (jdk-version-requirements) 58 0 R]
+/Names [(io.storefile.bloom.block.size) 365 0 R (ipc) 4390 0 R (irbrc) 817 0 R (irc) 3465 0 R (isolate-system-tables) 3428 0 R (java) 121 0 R (java-2) 1973 0 R (java-3) 1978 0 R (java.client.config) 525 0 R (jdk-issues) 2976 0 R (jdk-version-requirements) 58 0 R]
 >>
 endobj
 163 0 obj
@@ -34782,7 +34782,7 @@ endobj
 /F1.0 10 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [166 0 R]
@@ -35916,7 +35916,7 @@ endobj
 /F4.0 37 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [171 0 R 172 0 R 174 0 R]
@@ -36716,7 +36716,7 @@ endobj
 /F5.1 47 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [179 0 R 180 0 R 181 0 R 183 0 R 184 0 R 186 0 R 187 0 R]
@@ -38188,7 +38188,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [190 0 R 191 0 R]
@@ -38613,7 +38613,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -39269,7 +39269,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [197 0 R]
@@ -40036,7 +40036,7 @@ endobj
 /F4.0 37 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [202 0 R]
@@ -40567,7 +40567,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -40583,7 +40583,7 @@ endobj
 endobj
 212 0 obj
 << /Limits [(getshortmidpointkey-an-optimization-for-data-index-block) (handling-of-errors-during-log-splitting)]
-/Names [(getshortmidpointkey-an-optimization-for-data-index-block) 4227 0 R (getting.involved) 3439 0 R (getting_started) 50 0 R (git.best.practices) 3719 0 R (git.patch.flow) 3772 0 R (goals) 4320 0 R (guide-for-hbase-committers) 3746 0 R (guidelines-for-deploying-a-coprocessor) 2423 0 R (guidelines-for-reporting-effective-issues) 3457 0 R (hadoop) 142 0 R (hadoop.native.lib) 4143 0 R (hadoop.policy.file) 391 0 R (handling-of-errors-during-log-splitting) 1696 0 R]
+/Names [(getshortmidpointkey-an-optimization-for-data-index-block) 4243 0 R (getting.involved) 3454 0 R (getting_started) 50 0 R (git.best.practices) 3737 0 R (git.patch.flow) 3790 0 R (goals) 4336 0 R (guide-for-hbase-committers) 3764 0 R (guidelines-for-deploying-a-coprocessor) 2438 0 R (guidelines-for-reporting-effective-issues) 3472 0 R (hadoop) 142 0 R (hadoop.native.lib) 4161 0 R (hadoop.policy.file) 391 0 R (handling-of-errors-during-log-splitting) 1711 0 R]
 >>
 endobj
 213 0 obj
@@ -41113,7 +41113,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -41635,7 +41635,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -42270,7 +42270,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -42779,7 +42779,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -43310,7 +43310,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -43889,7 +43889,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [258 0 R 259 0 R]
@@ -44456,7 +44456,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [267 0 R 268 0 R 270 0 R 271 0 R]
@@ -44520,7 +44520,7 @@ endobj
 endobj
 274 0 obj
 << /Limits [(hbase.table.lock.enable) (hbase.tmp.dir)]
-/Names [(hbase.table.lock.enable) 418 0 R (hbase.table.max.rowsize) 419 0 R (hbase.tags) 1373 0 R (hbase.tests) 3597 0 R (hbase.tests.categories) 3641 0 R (hbase.tests.cluster) 3643 0 R (hbase.tests.example.code) 3644 0 R (hbase.tests.rules) 3637 0 R (hbase.tests.sleeps) 3642 0 R (hbase.tests.writing) 3636 0 R (hbase.thrift.maxQueuedRequests) 423 0 R (hbase.thrift.maxWorkerThreads) 422 0 R (hbase.thrift.minWorkerThreads) 421 0 R (hbase.tmp.dir) 204 0 R]
+/Names [(hbase.table.lock.enable) 418 0 R (hbase.table.max.rowsize) 419 0 R (hbase.tags) 1373 0 R (hbase.tests) 3613 0 R (hbase.tests.categories) 3657 0 R (hbase.tests.cluster) 3659 0 R (hbase.tests.example.code) 3660 0 R (hbase.tests.rules) 3653 0 R (hbase.tests.sleeps) 3658 0 R (hbase.tests.writing) 3652 0 R (hbase.thrift.maxQueuedRequests) 423 0 R (hbase.thrift.maxWorkerThreads) 422 0 R (hbase.thrift.minWorkerThreads) 421 0 R (hbase.tmp.dir) 204 0 R]
 >>
 endobj
 275 0 obj
@@ -45076,7 +45076,7 @@ endobj
 /F1.0 10 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -45671,7 +45671,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [291 0 R 292 0 R]
@@ -45691,7 +45691,7 @@ endobj
 endobj
 290 0 obj
 << /Limits [(hbase.cluster.distributed) (hbase.data.umask.enable)]
-/Names [(hbase.cluster.distributed) 206 0 R (hbase.column.max.version) 442 0 R (hbase.commit.msg.format) 3989 0 R (hbase.coordinated.state.manager.class) 484 0 R (hbase.coprocessor.abortonerror) 407 0 R (hbase.coprocessor.enabled) 402 0 R (hbase.coprocessor.master.classes) 406 0 R (hbase.coprocessor.region.classes) 405 0 R (hbase.coprocessor.user.enabled) 404 0 R (hbase.data.umask) 433 0 R (hbase.data.umask.enable) 432 0 R]
+/Names [(hbase.cluster.distributed) 206 0 R (hbase.column.max.version) 442 0 R (hbase.commit.msg.format) 4007 0 R (hbase.coordinated.state.manager.class) 484 0 R (hbase.coprocessor.abortonerror) 407 0 R (hbase.coprocessor.enabled) 402 0 R (hbase.coprocessor.master.classes) 406 0 R (hbase.coprocessor.region.classes) 405 0 R (hbase.coprocessor.user.enabled) 404 0 R (hbase.data.umask) 433 0 R (hbase.data.umask.enable) 432 0 R]
 >>
 endobj
 291 0 obj
@@ -46237,7 +46237,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -46728,7 +46728,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -47300,7 +47300,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -47846,7 +47846,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -48423,7 +48423,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -49043,7 +49043,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -49590,7 +49590,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -50113,7 +50113,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -50673,7 +50673,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [357 0 R]
@@ -51208,7 +51208,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [363 0 R]
@@ -51233,7 +51233,7 @@ endobj
 endobj
 366 0 obj
 << /Limits [(quota) (regions.arch)]
-/Names [(quota) 3273 0 R (read-api-and-usage) 1958 0 R (read-hbase-shell-commands-from-a-command-file) 803 0 R (reading-filtering-and-sending-edits) 3253 0 R (reading_cells_with_labels) 1427 0 R (recommended.configurations.hdfs) 553 0 R (recommended_configurations) 548 0 R (recommended_configurations.zk) 549 0 R (references) 3877 0 R (region-overlap-repairs) 4101 0 R (region-replication-for-meta-table-s-region) 1935 0 R (region-scanner-changes) 4447 0 R (region-transition-rpc-and-rs-heartbeat-can-arrive-at-same-time-on-master) 3910 0 R (regioncoprocessorenvironment-getregion-abstract-hregion) 4372 0 R (regioncoprocessorhost-postappend-append-append-result-result-void) 4373 0 R (regioncoprocessorhost-prestorefilereaderopen-filesystem-fs-path-p-fsdatainputstreamwrapper-in-long-size-cacheconfig-cacheconf-reference-r-storefile-reader) 4374 0 R (regionobserver) 4359 0 R (regions.arch) 1730 0 R]
+/Names [(quota) 3288 0 R (read-api-and-usage) 1974 0 R (read-hbase-shell-commands-from-a-command-file) 803 0 R (reading-filtering-and-sending-edits) 3268 0 R (reading_cells_with_labels) 1427 0 R (recommended.configurations.hdfs) 553 0 R (recommended_configurations) 548 0 R (recommended_configurations.zk) 549 0 R (references) 3895 0 R (region-overlap-repairs) 4119 0 R (region-replication-for-meta-table-s-region) 1951 0 R (region-scanner-changes) 4462 0 R (region-transition-rpc-and-rs-heartbeat-can-arrive-at-same-time-on-master) 3928 0 R (regioncoprocessorenvironment-getregion-abstract-hregion) 4387 0 R (regioncoprocessorhost-postappend-append-append-result-result-void) 4388 0 R (regioncoprocessorhost-prestorefilereaderopen-filesystem-fs-path-p-fsdatainputstreamwrapper-in-long-size-cacheconfig-cacheconf-reference-r-storefile-reader) 4389 0 R (regionobserver) 4375 0 R (regions.arch) 1745 0 R]
 >>
 endobj
 367 0 obj
@@ -51872,7 +51872,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [377 0 R 378 0 R 380 0 R 383 0 R 384 0 R]
@@ -51883,7 +51883,7 @@ endobj
 endobj
 375 0 obj
 << /Limits [(hbase.regionserver.thrift.compact) (hbase.rootdir.perms)]
-/Names [(hbase.regionserver.thrift.compact) 428 0 R (hbase.regionserver.thrift.framed) 424 0 R (hbase.regionserver.thrift.framed.max_frame_size_in_mb) 427 0 R (hbase.release.announcement) 3577 0 R (hbase.replication.management) 3223 0 R (hbase.replication.rpc.codec) 493 0 R (hbase.replication.source.maxthreads) 494 0 R (hbase.rest-csrf.browser-useragents-regex) 477 0 R (hbase.rest.csrf.enabled) 476 0 R (hbase.rest.filter.classes) 465 0 R (hbase.rest.port) 410 0 R (hbase.rest.readonly) 411 0 R (hbase.rest.support.proxyuser) 414 0 R (hbase.rest.threads.max) 412 0 R (hbase.rest.threads.min) 413 0 R (hbase.rolling.restart) 642 0 R (hbase.rolling.upgrade) 638 0 R (hbase.rootdir) 205 0 R (hbase.rootdir.perms) 429 0 R]
+/Names [(hbase.regionserver.thrift.compact) 428 0 R (hbase.regionserver.thrift.framed) 424 0 R (hbase.regionserver.thrift.framed.max_frame_size_in_mb) 427 0 R (hbase.release.announcement) 3592 0 R (hbase.replication.management) 3238 0 R (hbase.replication.rpc.codec) 493 0 R (hbase.replication.source.maxthreads) 494 0 R (hbase.rest-csrf.browser-useragents-regex) 477 0 R (hbase.rest.csrf.enabled) 476 0 R (hbase.rest.filter.classes) 465 0 R (hbase.rest.port) 410 0 R (hbase.rest.readonly) 411 0 R (hbase.rest.support.proxyuser) 414 0 R (hbase.rest.threads.max) 412 0 R (hbase.rest.threads.min) 413 0 R (hbase.rolling.restart) 642 0 R (hbase.rolling.upgrade) 638 0 R (hbase.rootdir) 205 0 R (hbase.rootdir.perms) 429 0 R]
 >>
 endobj
 376 0 obj
@@ -52462,7 +52462,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -52996,7 +52996,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -53546,7 +53546,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -54066,7 +54066,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -54599,7 +54599,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -54612,7 +54612,7 @@ endobj
 endobj
 420 0 obj
 << /Limits [(hbase.zookeeper.property.maxClientCnxns) (hbase_supported_tested_definitions)]
-/Names [(hbase.zookeeper.property.maxClientCnxns) 279 0 R (hbase.zookeeper.property.syncLimit) 273 0 R (hbase.zookeeper.quorum) 209 0 R (hbase_apis) 2177 0 R (hbase_default_configurations) 203 0 R (hbase_env) 537 0 R (hbase_metrics) 3165 0 R (hbase_mob) 1972 0 R (hbase_site) 533 0 R (hbase_supported_tested_definitions) 44 0 R]
+/Names [(hbase.zookeeper.property.maxClientCnxns) 279 0 R (hbase.zookeeper.property.syncLimit) 273 0 R (hbase.zookeeper.quorum) 209 0 R (hbase_apis) 2192 0 R (hbase_default_configurations) 203 0 R (hbase_env) 537 0 R (hbase_metrics) 3180 0 R (hbase_mob) 1988 0 R (hbase_site) 533 0 R (hbase_supported_tested_definitions) 44 0 R]
 >>
 endobj
 421 0 obj
@@ -55087,7 +55087,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -55103,7 +55103,7 @@ endobj
 endobj
 430 0 obj
 << /Limits [(hbase.rpc) (hbase.secure.spnego.ui)]
-/Names [(hbase.rpc) 4313 0 R (hbase.rpc.rows.warning.threshold) 516 0 R (hbase.rpc.shortoperation.timeout) 374 0 R (hbase.rpc.timeout) 368 0 R (hbase.rs.cacheblocksonwrite) 367 0 R (hbase.secure.bulkload) 1453 0 R (hbase.secure.configuration) 1293 0 R (hbase.secure.enable) 1457 0 R (hbase.secure.simpleconfiguration) 1335 0 R (hbase.secure.spnego.ui) 1288 0 R]
+/Names [(hbase.rpc) 4329 0 R (hbase.rpc.rows.warning.threshold) 516 0 R (hbase.rpc.shortoperation.timeout) 374 0 R (hbase.rpc.timeout) 368 0 R (hbase.rs.cacheblocksonwrite) 367 0 R (hbase.secure.bulkload) 1453 0 R (hbase.secure.configuration) 1293 0 R (hbase.secure.enable) 1457 0 R (hbase.secure.simpleconfiguration) 1335 0 R (hbase.secure.spnego.ui) 1288 0 R]
 >>
 endobj
 431 0 obj
@@ -55640,7 +55640,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -56208,7 +56208,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -56720,7 +56720,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -57263,7 +57263,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [468 0 R 469 0 R]
@@ -57902,7 +57902,7 @@ endobj
 /F4.0 37 0 R
 /F6.0 478 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [474 0 R 475 0 R]
@@ -57944,13 +57944,13 @@ endobj
 endobj
 478 0 obj
 << /Type /Font
-/BaseFont /066905+mplus1mn-bold
+/BaseFont /03fbbc+mplus1mn-bold
 /Subtype /TrueType
-/FontDescriptor 5249 0 R
+/FontDescriptor 5264 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5251 0 R
-/ToUnicode 5250 0 R
+/Widths 5266 0 R
+/ToUnicode 5265 0 R
 >>
 endobj
 479 0 obj
@@ -58513,7 +58513,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -58529,7 +58529,7 @@ endobj
 endobj
 485 0 obj
 << /Limits [(hbase.defaults.for.version.skip) (hbase.hregion.percolumnfamilyflush.size.lower.bound.min)]
-/Names [(hbase.defaults.for.version.skip) 415 0 R (hbase.dfs.client.read.shortcircuit.buffer.size) 447 0 R (hbase.display.keys) 399 0 R (hbase.dynamic.jars.dir) 463 0 R (hbase.encryption.server) 1440 0 R (hbase.env.sh) 518 0 R (hbase.fix.version.in.jira) 3977 0 R (hbase.history) 4273 0 R (hbase.hregion.majorcompaction) 320 0 R (hbase.hregion.majorcompaction.jitter) 321 0 R (hbase.hregion.max.filesize) 319 0 R (hbase.hregion.memstore.block.multiplier) 315 0 R (hbase.hregion.memstore.flush.size) 312 0 R (hbase.hregion.memstore.mslab.enabled) 316 0 R (hbase.hregion.percolumnfamilyflush.size.lower.bound.min) 313 0 R]
+/Names [(hbase.defaults.for.version.skip) 415 0 R (hbase.dfs.client.read.shortcircuit.buffer.size) 447 0 R (hbase.display.keys) 399 0 R (hbase.dynamic.jars.dir) 463 0 R (hbase.encryption.server) 1440 0 R (hbase.env.sh) 518 0 R (hbase.fix.version.in.jira) 3995 0 R (hbase.history) 4289 0 R (hbase.hregion.majorcompaction) 320 0 R (hbase.hregion.majorcompaction.jitter) 321 0 R (hbase.hregion.max.filesize) 319 0 R (hbase.hregion.memstore.block.multiplier) 315 0 R (hbase.hregion.memstore.flush.size) 312 0 R (hbase.hregion.memstore.mslab.enabled) 316 0 R (hbase.hregion.percolumnfamilyflush.size.lower.bound.min) 313 0 R]
 >>
 endobj
 486 0 obj
@@ -59061,7 +59061,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -59587,7 +59587,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -60121,7 +60121,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -60136,8 +60136,8 @@ endobj
 [503 0 R /XYZ 0 490.9675 null]
 endobj
 507 0 obj
-<< /Limits [(hbase.moduletest.run) (hbase.org.site.publishing)]
-/Names [(hbase.moduletest.run) 3606 0 R (hbase.moduletest.shell) 3601 0 R (hbase.moduletests) 3599 0 R (hbase.normalizer.min.region.count) 306 0 R (hbase.normalizer.period) 305 0 R (hbase.offpeak.end.hour) 342 0 R (hbase.offpeak.start.hour) 341 0 R (hbase.org) 3588 0 R (hbase.org.site.contributing) 3590 0 R (hbase.org.site.publishing) 3592 0 R]
+<< /Limits [(hbase.moduletest.run) (hbase.procedure.store.wal.max.retries.before.roll)]
+/Names [(hbase.moduletest.run) 3622 0 R (hbase.moduletest.shell) 3617 0 R (hbase.moduletests) 3615 0 R (hbase.normalizer.min.region.count) 306 0 R (hbase.normalizer.period) 305 0 R (hbase.offpeak.end.hour) 342 0 R (hbase.offpeak.start.hour) 341 0 R (hbase.org) 3603 0 R (hbase.org.site.contributing) 3605 0 R (hbase.org.site.publishing) 3608 0 R (hbase.private.api) 634 0 R (hbase.procedure.master.classes) 483 0 R (hbase.procedure.regionserver.classes) 482 0 R (hbase.procedure.store.wal.max.retries.before.roll) 1604 0 R]
 >>
 endobj
 508 0 obj
@@ -60637,7 +60637,7 @@ endobj
 /F4.0 37 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -61557,7 +61557,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -62632,7 +62632,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [526 0 R 527 0 R]
@@ -64103,7 +64103,7 @@ endobj
 /F3.0 35 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -64627,7 +64627,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -65280,7 +65280,7 @@ endobj
 /F4.0 37 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [542 0 R 543 0 R 545 0 R 546 0 R 547 0 R 552 0 R]
@@ -66035,7 +66035,7 @@ endobj
 /F2.0 31 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [558 0 R 561 0 R 564 0 R]
@@ -66074,7 +66074,7 @@ endobj
 endobj
 563 0 obj
 << /Limits [(configuration) (coprocessor-implementation-overview)]
-/Names [(configuration) 106 0 R (configuration-2) 3194 0 R (configuration-3) 4064 0 R (configuration-files) 113 0 R (configuration-from-scratch) 3962 0 R (configuration-properties) 1940 0 R (configure-mob-compaction-mergeable-threshold) 1981 0 R (configure-mob-compaction-policy) 1980 0 R (configuring-columns-for-mob) 1977 0 R (configuring-server-wide-behavior-of-bloom-filters) 2547 0 R (configuring-the-rest-server-and-client) 2196 0 R (confirm) 196 0 R (connection-setup) 4325 0 R (constraints) 1092 0 R (contributing-to-documentation-or-other-strings) 4000 0 R (coprocessor-api-changes) 4349 0 R (coprocessor-implementation-overview) 2352 0 R]
+/Names [(configuration) 106 0 R (configuration-2) 3209 0 R (configuration-3) 4082 0 R (configuration-files) 113 0 R (configuration-from-scratch) 3980 0 R (configuration-properties) 1956 0 R (configure-mob-compaction-mergeable-threshold) 1997 0 R (configure-mob-compaction-policy) 1996 0 R (configuring-columns-for-mob) 1993 0 R (configuring-server-wide-behavior-of-bloom-filters) 2562 0 R (configuring-the-rest-server-and-client) 2211 0 R (confirm) 196 0 R (connection-setup) 4341 0 R (constraints) 1092 0 R (contributing-to-documentation-or-other-strings) 4018 0 R (coprocessor-api-changes) 4365 0 R (coprocessor-implementation-overview) 2367 0 R]
 >>
 endobj
 564 0 obj
@@ -66936,7 +66936,7 @@ endobj
 /F4.0 37 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [568 0 R]
@@ -67750,7 +67750,7 @@ endobj
 /F5.1 47 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [572 0 R 573 0 R 574 0 R 578 0 R 581 0 R 582 0 R 583 0 R]
@@ -68492,7 +68492,7 @@ endobj
 /F1.1 40 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [587 0 R 588 0 R]
@@ -69924,7 +69924,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [592 0 R 593 0 R]
@@ -70701,7 +70701,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -71402,7 +71402,7 @@ endobj
 /F3.0 35 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -72733,7 +72733,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -74228,7 +74228,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -74550,7 +74550,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -74748,7 +74748,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [608 0 R 609 0 R 610 0 R 611 0 R 612 0 R]
@@ -75502,7 +75502,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [617 0 R]
@@ -76419,7 +76419,7 @@ endobj
 /F3.0 35 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [621 0 R]
@@ -78734,7 +78734,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [624 0 R 625 0 R]
@@ -79767,7 +79767,7 @@ endobj
 /F1.1 40 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [629 0 R 630 0 R 631 0 R]
@@ -80431,7 +80431,7 @@ endobj
 /F2.0 31 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [639 0 R 640 0 R 641 0 R 643 0 R 644 0 R 645 0 R]
@@ -81226,7 +81226,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [653 0 R]
@@ -81237,17 +81237,17 @@ endobj
 endobj
 649 0 obj
 << /Limits [(standalone.over.hdfs) (storefile-refresher)]
-/Names [(standalone.over.hdfs) 175 0 R (standalone_dist) 170 0 R (starting-and-stopping-the-rest-server) 2195 0 R (statemachineprocedure) 3873 0 R (static-loading) 2386 0 R (static-unloading) 2389 0 R (store) 1781 0 R (store-file-ttl) 1934 0 R (store.file.dir) 1796 0 R (store.memstore) 1782 0 R (storefile-changes) 4449 0 R (storefile-refresher) 1930 0 R]
+/Names [(standalone.over.hdfs) 175 0 R (standalone_dist) 170 0 R (starting-and-stopping-the-rest-server) 2210 0 R (statemachineprocedure) 3891 0 R (static-loading) 2401 0 R (static-unloading) 2404 0 R (store) 1796 0 R (store-file-ttl) 1950 0 R (store.file.dir) 1812 0 R (store.memstore) 1797 0 R (storefile-changes) 4464 0 R (storefile-refresher) 1946 0 R]
 >>
 endobj
 650 0 obj
 << /Limits [(__anchor-top) (build.thrift)]
-/Kids [161 0 R 3716 0 R 4409 0 R 1898 0 R 1477 0 R 3669 0 R 1640 0 R 4222 0 R 2129 0 R 2087 0 R 2044 0 R 2078 0 R]
+/Kids [161 0 R 3734 0 R 4424 0 R 1914 0 R 1477 0 R 3686 0 R 1655 0 R 4238 0 R 2144 0 R 2102 0 R 2059 0 R 2093 0 R]
 >>
 endobj
 651 0 obj
 << /Limits [(hbase.moduletest.run) (hbase.zookeeper.property.initLimit)]
-/Kids [507 0 R 4169 0 R 382 0 R 245 0 R 375 0 R 430 0 R 4314 0 R 462 0 R 274 0 R 3638 0 R 3609 0 R]
+/Kids [507 0 R 3606 0 R 382 0 R 245 0 R 375 0 R 430 0 R 4330 0 R 462 0 R 274 0 R 3654 0 R 3625 0 R]
 >>
 endobj
 652 0 obj
@@ -81911,7 +81911,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -82626,7 +82626,7 @@ endobj
 /F1.0 10 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [661 0 R]
@@ -83093,7 +83093,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -83813,7 +83813,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [670 0 R 673 0 R]
@@ -85079,7 +85079,7 @@ endobj
 /F3.0 35 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [676 0 R 677 0 R 678 0 R 679 0 R 680 0 R 681 0 R 682 0 R 683 0 R 686 0 R 687 0 R 688 0 R]
@@ -86015,7 +86015,7 @@ endobj
 /Font << /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [691 0 R 692 0 R 695 0 R]
@@ -86914,7 +86914,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [700 0 R 701 0 R 703 0 R 705 0 R 706 0 R]
@@ -87733,7 +87733,7 @@ endobj
 /Font << /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [709 0 R 712 0 R 715 0 R 716 0 R]
@@ -88737,7 +88737,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [721 0 R 723 0 R 725 0 R]
@@ -89557,7 +89557,7 @@ endobj
 /F3.0 35 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [730 0 R 736 0 R 737 0 R 738 0 R]
@@ -90594,7 +90594,7 @@ endobj
 /F2.0 31 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [744 0 R 745 0 R 746 0 R 748 0 R 749 0 R 750 0 R 751 0 R 752 0 R 753 0 R 754 0 R 755 0 R 756 0 R 757 0 R 758 0 R 759 0 R]
@@ -91650,7 +91650,7 @@ endobj
 /F4.0 37 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [762 0 R 763 0 R 765 0 R 766 0 R 767 0 R 768 0 R 769 0 R]
@@ -92402,7 +92402,7 @@ endobj
 /F2.0 31 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [772 0 R]
@@ -92650,7 +92650,7 @@ endobj
 /Font << /F1.0 10 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -93004,7 +93004,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [784 0 R 785 0 R 786 0 R]
@@ -93205,7 +93205,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -93443,7 +93443,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [793 0 R]
@@ -94039,7 +94039,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [797 0 R]
@@ -94576,7 +94576,7 @@ endobj
 /F4.0 37 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -94826,7 +94826,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -95310,7 +95310,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -95585,7 +95585,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -96022,7 +96022,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -96585,7 +96585,7 @@ endobj
 /Font << /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -97196,7 +97196,7 @@ endobj
 /F3.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -98366,7 +98366,7 @@ endobj
 /F2.0 31 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [821 0 R]
@@ -98388,7 +98388,7 @@ endobj
 endobj
 823 0 obj
 << /Limits [(multiple-typed-queues) (new-system)]
-/Names [(multiple-typed-queues) 3286 0 R (multiwal) 1688 0 R (nagles) 580 0 R (namespace) 851 0 R (namespace_creation) 855 0 R (namespace_quotas) 3282 0 R (namespace_special) 856 0 R (network-saturation-the-winner) 2991 0 R (new-committers) 3747 0 R (new-configs) 3914 0 R (new-system) 3893 0 R]
+/Names [(multiple-typed-queues) 3301 0 R (multiwal) 1703 0 R (nagles) 580 0 R (namespace) 851 0 R (namespace_creation) 855 0 R (namespace_quotas) 3297 0 R (namespace_special) 856 0 R (network-saturation-the-winner) 3006 0 R (new-committers) 3765 0 R (new-configs) 3932 0 R (new-system) 3911 0 R]
 >>
 endobj
 824 0 obj
@@ -103652,7 +103652,7 @@ endobj
 /F2.0 31 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -104028,7 +104028,7 @@ endobj
 /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -104643,7 +104643,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -107184,7 +107184,7 @@ endobj
 /F5.1 47 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [840 0 R 841 0 R 842 0 R]
@@ -110203,7 +110203,7 @@ endobj
 /Font << /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -111637,7 +111637,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [848 0 R]
@@ -112421,7 +112421,7 @@ endobj
 /F4.0 37 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [852 0 R 853 0 R 854 0 R]
@@ -112623,7 +112623,7 @@ endobj
 /F4.0 37 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -112694,7 +112694,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -112798,7 +112798,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -113215,7 +113215,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -113335,7 +113335,7 @@ endobj
 /F3.0 35 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -115522,7 +115522,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [874 0 R 876 0 R 877 0 R 879 0 R 880 0 R 881 0 R 883 0 R 884 0 R]
@@ -115851,7 +115851,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [889 0 R 890 0 R 891 0 R 892 0 R]
@@ -115862,7 +115862,7 @@ endobj
 endobj
 888 0 obj
 << /Limits [(dfs.datanode.failed.volumes.tolerated) (dm.column.metadata)]
-/Names [(dfs.datanode.failed.volumes.tolerated) 556 0 R (dfs.datanode.max.transfer.threads) 160 0 R (dfs.domain.socket.path) 446 0 R (dialog) 3780 0 R (direct.memory) 1645 0 R (disable-nagle-for-rpc) 1154 0 R (disable.splitting) 567 0 R (disabling-metrics) 3172 0 R (disabling.blockcache) 579 0 R (discovering.available.metrics) 3173 0 R (distributed) 176 0 R (distributed.log.replay.failure.reasons) 1718 0 R (distributed.log.splitting) 1706 0 R (dm.column.metadata) 947 0 R]
+/Names [(dfs.datanode.failed.volumes.tolerated) 556 0 R (dfs.datanode.max.transfer.threads) 160 0 R (dfs.domain.socket.path) 446 0 R (dialog) 3798 0 R (direct.memory) 1660 0 R (disable-nagle-for-rpc) 1154 0 R (disable.splitting) 567 0 R (disabling-metrics) 3187 0 R (disabling.blockcache) 579 0 R (discovering.available.metrics) 3188 0 R (distributed) 176 0 R (distributed.log.replay.failure.reasons) 1733 0 R (distributed.log.splitting) 1721 0 R (dm.column.metadata) 947 0 R]
 >>
 endobj
 889 0 obj
@@ -116871,7 +116871,7 @@ endobj
 /F7.0 902 0 R
 /F7.1 903 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [897 0 R 898 0 R 901 0 R]
@@ -116882,7 +116882,7 @@ endobj
 endobj
 896 0 obj
 << /Limits [(upgrade2.0.ui.splitmerge.by.row) (using-the-java-api)]
-/Names [(upgrade2.0.ui.splitmerge.by.row) 719 0 R (upgrade2.0.zkconfig) 710 0 R (upgrading) 607 0 R (upgrading-2) 4059 0 R (use-cases-for-observer-coprocessors) 2364 0 R (user-interface) 1951 0 R (using-existing-zookeeper-ensemble) 3934 0 R (using-hbase-shell) 2402 0 R (using-rest-endpoints) 2199 0 R (using-secure-http-https-for-the-web-ui) 1284 0 R (using-the-code-rolling-restart-sh-code-script) 3151 0 R (using-the-java-api) 2403 0 R]
+/Names [(upgrade2.0.ui.splitmerge.by.row) 719 0 R (upgrade2.0.zkconfig) 710 0 R (upgrading) 607 0 R (upgrading-2) 4077 0 R (use-cases-for-observer-coprocessors) 2379 0 R (user-interface) 1967 0 R (using-existing-zookeeper-ensemble) 3952 0 R (using-hbase-shell) 2417 0 R (using-rest-endpoints) 2214 0 R (using-secure-http-https-for-the-web-ui) 1284 0 R (using-the-code-rolling-restart-sh-code-script) 3166 0 R (using-the-java-api) 2418 0 R]
 >>
 endobj
 897 0 obj
@@ -116912,7 +116912,7 @@ endobj
 endobj
 900 0 obj
 << /Limits [(rowcounter) (save-the-dataframe)]
-/Names [(rowcounter) 3095 0 R (rowcounter-example) 1229 0 R (rowkey.design) 989 0 R (rowkey.regionsplits) 1031 0 R (rowkey.scope) 1028 0 R (rpc) 4322 0 R (rpc.configs) 4341 0 R (rpc.logging) 2713 0 R (rpcscheduler-dispatch-callrunner-p1-abstract-void-1) 4380 0 R (rs.failover.details) 3255 0 R (rs_metrics) 3181 0 R (rsgroup) 3405 0 R (run-canary-test-as-daemon-mode) 3033 0 R (run.insitu) 3710 0 R (running-canary-in-a-kerberos-enabled-cluster) 3039 0 R (running-hbck-to-identify-inconsistencies) 4094 0 R (running-multiple-workloads-on-a-single-cluster) 3269 0 R (running-the-shell-in-non-interactive-mode) 792 0 R (save-the-dataframe) 2322 0 R]
+/Names [(rowcounter) 3110 0 R (rowcounter-example) 1229 0 R (rowkey.design) 989 0 R (rowkey.regionsplits) 1031 0 R (rowkey.scope) 1028 0 R (rpc) 4338 0 R (rpc.configs) 4357 0 R (rpc.logging) 2728 0 R (rpcscheduler-dispatch-callrunner-p1-abstract-void-1) 4395 0 R (rs.failover.details) 3270 0 R (rs_metrics) 3196 0 R (rsgroup) 3420 0 R (run-canary-test-as-daemon-mode) 3048 0 R (run.insitu) 3728 0 R (running-canary-in-a-kerberos-enabled-cluster) 3054 0 R (running-hbck-to-identify-inconsistencies) 4112 0 R (running-multiple-workloads-on-a-single-cluster) 3284 0 R (running-the-shell-in-non-interactive-mode) 792 0 R (save-the-dataframe) 2337 0 R]
 >>
 endobj
 901 0 obj
@@ -116930,22 +116930,22 @@ endobj
 << /Type /Font
 /BaseFont /0c7a4a+mplus-1p-regular
 /Subtype /TrueType
-/FontDescriptor 5253 0 R
+/FontDescriptor 5268 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5255 0 R
-/ToUnicode 5254 0 R
+/Widths 5270 0 R
+/ToUnicode 5269 0 R
 >>
 endobj
 903 0 obj
 << /Type /Font
 /BaseFont /a99fc7+mplus-1p-regular
 /Subtype /TrueType
-/FontDescriptor 5257 0 R
+/FontDescriptor 5272 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 5259 0 R
-/ToUnicode 5258 0 R
+/Widths 5274 0 R
+/ToUnicode 5273 0 R
 >>
 endobj
 904 0 obj
@@ -118813,7 +118813,7 @@ endobj
 /F7.1 903 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [906 0 R 907 0 R 910 0 R 911 0 R 912 0 R 913 0 R]
@@ -122749,7 +122749,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -123731,7 +123731,7 @@ endobj
 /F1.1 40 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [924 0 R 925 0 R 926 0 R 927 0 R 928 0 R 929 0 R]
@@ -124623,7 +124623,7 @@ endobj
 /F2.0 31 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [933 0 R 934 0 R 936 0 R 937 0 R 939 0 R 941 0 R]
@@ -124788,7 +124788,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -124955,7 +124955,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [948 0 R]
@@ -125184,7 +125184,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -125302,7 +125302,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [955 0 R 956 0 R]
@@ -125715,7 +125715,7 @@ endobj
 /F1.0 10 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [960 0 R 961 0 R 962 0 R 963 0 R 964 0 R 965 0 R]
@@ -127190,7 +127190,7 @@ endobj
 /F4.0 37 0 R
 /F5.1 47 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [969 0 R 970 0 R 971 0 R 973 0 R]
@@ -127710,7 +127710,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [977 0 R]
@@ -127927,7 +127927,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [981 0 R]
@@ -128244,7 +128244,7 @@ endobj
 /Font << /F2.0 31 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [985 0 R]
@@ -128660,7 +128660,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 >>
@@ -129389,7 +129389,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 37 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 >>
@@ -130322,7 +130322,7 @@ endobj
 /F4.0 37 0 R
 /F2.0 31 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [995 0 R 996 0 R 997 0 R 998 0 R 1001 0 R 1002 0 R 1003 0 R 1004 0 R 1005 0 R 1007 0 R 1008 0 R]
@@ -130952,7 +130952,7 @@ endobj
 /F2.0 31 0 R
 /F1.1 40 0 R
 >>
-/XObject << /Stamp1 4970 0 R
+/XObject << /Stamp1 4985 0 R
 >>
 >>
 /Annots [1011 0 R 1012 0 R 1013 0 R 1015 0 R 1017 0 R]
@@ -134533,7 +134533,7 @@ endobj
 /F5.1 47 0 R
 /F3.0 35 0 R
 >>
-/XObject << /Stamp2 4971 0 R
+/XObject << /Stamp2 4986 0 R
 >>
 >>
 /Annots [1023 0 R 1024 0 R]
@@ -135251,7 +135251,7 

<TRUNCATED>

[42/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
index 7c7d1af..f93a683 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1479">BucketCache.RAMQueueEntry</a>
+<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1497">BucketCache.RAMQueueEntry</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Block Entry stored in the memory with key,data and so on</div>
 </li>
@@ -199,9 +199,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i3" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
             <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
 </tr>
 </table>
@@ -232,7 +231,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>key</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1480">key</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1498">key</a></pre>
 </li>
 </ul>
 <a name="data">
@@ -241,7 +240,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>data</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1481">data</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1499">data</a></pre>
 </li>
 </ul>
 <a name="accessCounter">
@@ -250,7 +249,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>accessCounter</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1482">accessCounter</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1500">accessCounter</a></pre>
 </li>
 </ul>
 <a name="inMemory">
@@ -259,7 +258,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>inMemory</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1483">inMemory</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1501">inMemory</a></pre>
 </li>
 </ul>
 </li>
@@ -276,7 +275,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RAMQueueEntry</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1485">RAMQueueEntry</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;bck,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1503">RAMQueueEntry</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;bck,
                      <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;data,
                      long&nbsp;accessCounter,
                      boolean&nbsp;inMemory)</pre>
@@ -296,7 +295,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1493">getData</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1511">getData</a>()</pre>
 </li>
 </ul>
 <a name="getKey--">
@@ -305,7 +304,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1497">getKey</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1515">getKey</a>()</pre>
 </li>
 </ul>
 <a name="access-long-">
@@ -314,18 +313,17 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>access</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1501">access</a>(long&nbsp;accessCounter)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1519">access</a>(long&nbsp;accessCounter)</pre>
 </li>
 </ul>
-<a name="writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">
+<a name="writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeToCache</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1505">writeToCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1523">writeToCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
                                             <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-                                            <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)
                                      throws <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CacheFullException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CacheFullException</a>,
                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
index 1772d99..ddadbd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1390">BucketCache.SharedMemoryBucketEntry</a>
+<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1408">BucketCache.SharedMemoryBucketEntry</a>
 extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></pre>
 </li>
 </ul>
@@ -222,7 +222,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></h3>
-<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#access-long-">access</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getCachedTime--">getCachedTime</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getLength--">getLength</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getPriority--">getPriority</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#offset--">offset</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase
 .io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference</a></code></li>
+<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#access-long-">access</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference--">deserializerReference</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getAccessCounter--">getAccessCounter</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getCachedTime--">getCachedTime</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getLength--">getLength</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getPriority--">getPriority</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#offset--">offset</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfi
 le/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">setDeserialiserReference</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
@@ -251,7 +251,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockList">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1391">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1409">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.SharedMemoryBucketEntry.serialVersionUID">Constant Field Values</a></dd>
@@ -264,7 +264,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockList">
 <li class="blockList">
 <h4>markedForEvict</h4>
-<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1394">markedForEvict</a></pre>
+<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1412">markedForEvict</a></pre>
 </li>
 </ul>
 <a name="refCount">
@@ -273,7 +273,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>refCount</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1395">refCount</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1413">refCount</a></pre>
 </li>
 </ul>
 </li>
@@ -290,7 +290,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SharedMemoryBucketEntry</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1397">SharedMemoryBucketEntry</a>(long&nbsp;offset,
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1415">SharedMemoryBucketEntry</a>(long&nbsp;offset,
                         int&nbsp;length,
                         long&nbsp;accessCounter,
                         boolean&nbsp;inMemory)</pre>
@@ -310,7 +310,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockList">
 <li class="blockList">
 <h4>getRefCount</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1402">getRefCount</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1420">getRefCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getRefCount--">getRefCount</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></dd>
@@ -323,7 +323,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockList">
 <li class="blockList">
 <h4>incrementRefCountAndGet</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1407">incrementRefCountAndGet</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1425">incrementRefCountAndGet</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#incrementRefCountAndGet--">incrementRefCountAndGet</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></dd>
@@ -336,7 +336,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockList">
 <li class="blockList">
 <h4>decrementRefCountAndGet</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1412">decrementRefCountAndGet</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1430">decrementRefCountAndGet</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#decrementRefCountAndGet--">decrementRefCountAndGet</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></dd>
@@ -349,7 +349,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockList">
 <li class="blockList">
 <h4>isMarkedForEvict</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1417">isMarkedForEvict</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1435">isMarkedForEvict</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#isMarkedForEvict--">isMarkedForEvict</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></dd>
@@ -362,7 +362,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>markForEvict</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1422">markForEvict</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html#line.1440">markForEvict</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#markForEvict--">markForEvict</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
index 896f6bb..0e7febd 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.647">BucketCache.StatisticsThread</a>
+<pre>private static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.642">BucketCache.StatisticsThread</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a></pre>
 </li>
 </ul>
@@ -239,7 +239,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>bucketCache</h4>
-<pre>private final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.648">bucketCache</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.643">bucketCache</a></pre>
 </li>
 </ul>
 </li>
@@ -256,7 +256,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>StatisticsThread</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.650">StatisticsThread</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;bucketCache)</pre>
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.645">StatisticsThread</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;bucketCache)</pre>
 </li>
 </ul>
 </li>
@@ -273,7 +273,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.657">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.652">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
index 8a0b072..b4184e5 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.894">BucketCache.WriterThread</a>
+<pre>class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.889">BucketCache.WriterThread</a>
 extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.html" title="class in org.apache.hadoop.hbase.util">HasThread</a></pre>
 </li>
 </ul>
@@ -238,7 +238,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>inputQueue</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.895">inputQueue</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.890">inputQueue</a></pre>
 </li>
 </ul>
 <a name="writerEnabled">
@@ -247,7 +247,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writerEnabled</h4>
-<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.896">writerEnabled</a></pre>
+<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.891">writerEnabled</a></pre>
 </li>
 </ul>
 </li>
@@ -264,7 +264,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WriterThread</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.898">WriterThread</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;queue)</pre>
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.893">WriterThread</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;queue)</pre>
 </li>
 </ul>
 </li>
@@ -281,7 +281,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>disableWriter</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.905">disableWriter</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.900">disableWriter</a>()</pre>
 </li>
 </ul>
 <a name="run--">
@@ -290,7 +290,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.910">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.905">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
@@ -305,7 +305,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>putIntoBackingMap</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.944">putIntoBackingMap</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.939">putIntoBackingMap</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
                                <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;bucketEntry)</pre>
 <div class="block">Put the new bucket entry into backingMap. Notice that we are allowed to replace the existing
  cache with a new block for the same cache key. there's a corner case: one thread cache a
@@ -329,7 +329,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>doDrain</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.966">doDrain</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;entries)
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.961">doDrain</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;entries)
       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Flush the entries in ramCache to IOEngine and add bucket entry to backingMap.
  Process all that are passed in even if failure being sure to remove from ramCache else we'll


[09/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.Providers.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.Providers.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.Providers.html
index d2d8da1..5bbbf0c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.Providers.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALFactory.Providers.html
@@ -90,391 +90,392 @@
 <span class="sourceLineNo">082</span>  static final String DEFAULT_WAL_PROVIDER = Providers.defaultProvider.name();<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
 <span class="sourceLineNo">084</span>  public static final String META_WAL_PROVIDER = "hbase.wal.meta_provider";<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  static final String DEFAULT_META_WAL_PROVIDER = Providers.defaultProvider.name();<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  final String factoryId;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  private final WALProvider provider;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // The meta updates are written to a different wal. If this<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  // regionserver holds meta regions, then this ref will be non-null.<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  // lazily intialized; most RegionServers don't deal with META<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  private final AtomicReference&lt;WALProvider&gt; metaProvider = new AtomicReference&lt;&gt;();<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * Configuration-specified WAL Reader used when a custom reader is requested<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private final Class&lt;? extends AbstractFSWALProvider.Reader&gt; logReaderClass;<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  /**<a name="line.99"></a>
-<span class="sourceLineNo">100</span>   * How long to attempt opening in-recovery wals<a name="line.100"></a>
-<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private final int timeoutMillis;<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private final Configuration conf;<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  // Used for the singleton WALFactory, see below.<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private WALFactory(Configuration conf) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    // this code is duplicated here so we can keep our members final.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      AbstractFSWALProvider.Reader.class);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    this.conf = conf;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    // end required early initialization<a name="line.116"></a>
-<span class="sourceLineNo">117</span><a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // this instance can't create wals, just reader/writers.<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    provider = null;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    factoryId = SINGLETON_ID;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>  @VisibleForTesting<a name="line.123"></a>
-<span class="sourceLineNo">124</span>  public Class&lt;? extends WALProvider&gt; getProviderClass(String key, String defaultValue) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    try {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      Providers provider = Providers.valueOf(conf.get(key, defaultValue));<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      if (provider != Providers.defaultProvider) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        // User gives a wal provider explicitly, just use that one<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        return provider.clazz;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      }<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      // AsyncFSWAL has better performance in most cases, and also uses less resources, we will try<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      // to use it if possible. But it deeply hacks into the internal of DFSClient so will be easily<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      // broken when upgrading hadoop. If it is broken, then we fall back to use FSHLog.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      if (AsyncFSWALProvider.load()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>        return AsyncFSWALProvider.class;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      } else {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        return FSHLogProvider.class;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    } catch (IllegalArgumentException exception) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      // Fall back to them specifying a class name<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      // Note that the passed default class shouldn't actually be used, since the above only fails<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      // when there is a config value present.<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      return conf.getClass(key, Providers.defaultProvider.clazz, WALProvider.class);<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    }<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  static WALProvider createProvider(Class&lt;? extends WALProvider&gt; clazz) throws IOException {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    LOG.info("Instantiating WALProvider of type {}", clazz);<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    try {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      return clazz.getDeclaredConstructor().newInstance();<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    } catch (Exception e) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      LOG.error("couldn't set up WALProvider, the configured class is " + clazz);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      LOG.debug("Exception details for failure to load WALProvider.", e);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      throw new IOException("couldn't set up WALProvider", e);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    }<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  }<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>  /**<a name="line.158"></a>
-<span class="sourceLineNo">159</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.159"></a>
-<span class="sourceLineNo">160</span>   *          instances.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.161"></a>
-<span class="sourceLineNo">162</span>   *          to make a directory<a name="line.162"></a>
-<span class="sourceLineNo">163</span>   */<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  public WALFactory(Configuration conf, String factoryId) throws IOException {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    // default enableSyncReplicationWALProvider is true, only disable SyncReplicationWALProvider<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    // for HMaster or HRegionServer which take system table only. See HBASE-19999<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    this(conf, factoryId, true);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
-<span class="sourceLineNo">171</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.171"></a>
-<span class="sourceLineNo">172</span>   *          instances.<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   *          to make a directory<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param enableSyncReplicationWALProvider whether wrap the wal provider to a<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   *          {@link SyncReplicationWALProvider}<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   */<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public WALFactory(Configuration conf, String factoryId, boolean enableSyncReplicationWALProvider)<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      throws IOException {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      AbstractFSWALProvider.Reader.class);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    this.conf = conf;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    this.factoryId = factoryId;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    // end required early initialization<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    if (conf.getBoolean("hbase.regionserver.hlog.enabled", true)) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      WALProvider provider = createProvider(getProviderClass(WAL_PROVIDER, DEFAULT_WAL_PROVIDER));<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      if (enableSyncReplicationWALProvider) {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        provider = new SyncReplicationWALProvider(provider);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      }<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      provider.init(this, conf, null);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      this.provider = provider;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    } else {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      // special handling of existing configuration behavior.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      LOG.warn("Running with WAL disabled.");<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      provider = new DisabledWALProvider();<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      provider.init(this, conf, factoryId);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  /**<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   * Shutdown all WALs and clean up any underlying storage.<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Use only when you will not need to replay and edits that have gone to any wals from this<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * factory.<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  public void close() throws IOException {<a name="line.210"></a>
-<span class="sourceLineNo">211</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    if (null != metaProvider) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      metaProvider.close();<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    // close is called on a WALFactory with null provider in the case of contention handling<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // within the getInstance method.<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    if (null != provider) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      provider.close();<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  }<a name="line.220"></a>
-<span class="sourceLineNo">221</span><a name="line.221"></a>
-<span class="sourceLineNo">222</span>  /**<a name="line.222"></a>
-<span class="sourceLineNo">223</span>   * Tell the underlying WAL providers to shut down, but do not clean up underlying storage.<a name="line.223"></a>
-<span class="sourceLineNo">224</span>   * If you are not ending cleanly and will need to replay edits from this factory's wals,<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * use this method if you can as it will try to leave things as tidy as possible.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  public void shutdown() throws IOException {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    IOException exception = null;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    if (null != metaProvider) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      try {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        metaProvider.shutdown();<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      } catch(IOException ioe) {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>        exception = ioe;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    provider.shutdown();<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    if (null != exception) {<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      throw exception;<a name="line.239"></a>
-<span class="sourceLineNo">240</span>    }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public List&lt;WAL&gt; getWALs() {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>    return provider.getWALs();<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  }<a name="line.245"></a>
-<span class="sourceLineNo">246</span><a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private WALProvider getMetaProvider() throws IOException {<a name="line.247"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  final String factoryId;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  private final WALProvider provider;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  // The meta updates are written to a different wal. If this<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  // regionserver holds meta regions, then this ref will be non-null.<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  // lazily intialized; most RegionServers don't deal with META<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  private final AtomicReference&lt;WALProvider&gt; metaProvider = new AtomicReference&lt;&gt;();<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>  /**<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * Configuration-specified WAL Reader used when a custom reader is requested<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private final Class&lt;? extends AbstractFSWALProvider.Reader&gt; logReaderClass;<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /**<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * How long to attempt opening in-recovery wals<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   */<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private final int timeoutMillis;<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private final Configuration conf;<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  // Used for the singleton WALFactory, see below.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private WALFactory(Configuration conf) {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>    // this code is duplicated here so we can keep our members final.<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      AbstractFSWALProvider.Reader.class);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    this.conf = conf;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    // end required early initialization<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>    // this instance can't create wals, just reader/writers.<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    provider = null;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    factoryId = SINGLETON_ID;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  @VisibleForTesting<a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public Class&lt;? extends WALProvider&gt; getProviderClass(String key, String defaultValue) {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    try {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      Providers provider = Providers.valueOf(conf.get(key, defaultValue));<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      if (provider != Providers.defaultProvider) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        // User gives a wal provider explicitly, just use that one<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        return provider.clazz;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      // AsyncFSWAL has better performance in most cases, and also uses less resources, we will try<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      // to use it if possible. But it deeply hacks into the internal of DFSClient so will be easily<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      // broken when upgrading hadoop. If it is broken, then we fall back to use FSHLog.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      if (AsyncFSWALProvider.load()) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        return AsyncFSWALProvider.class;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      } else {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        return FSHLogProvider.class;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>      }<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    } catch (IllegalArgumentException exception) {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      // Fall back to them specifying a class name<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      // Note that the passed default class shouldn't actually be used, since the above only fails<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // when there is a config value present.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      return conf.getClass(key, Providers.defaultProvider.clazz, WALProvider.class);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  }<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  static WALProvider createProvider(Class&lt;? extends WALProvider&gt; clazz) throws IOException {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    LOG.info("Instantiating WALProvider of type {}", clazz);<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    try {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      return clazz.getDeclaredConstructor().newInstance();<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    } catch (Exception e) {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      LOG.error("couldn't set up WALProvider, the configured class is " + clazz);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      LOG.debug("Exception details for failure to load WALProvider.", e);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      throw new IOException("couldn't set up WALProvider", e);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  /**<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.158"></a>
+<span class="sourceLineNo">159</span>   *          instances.<a name="line.159"></a>
+<span class="sourceLineNo">160</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.160"></a>
+<span class="sourceLineNo">161</span>   *          to make a directory<a name="line.161"></a>
+<span class="sourceLineNo">162</span>   */<a name="line.162"></a>
+<span class="sourceLineNo">163</span>  public WALFactory(Configuration conf, String factoryId) throws IOException {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    // default enableSyncReplicationWALProvider is true, only disable SyncReplicationWALProvider<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    // for HMaster or HRegionServer which take system table only. See HBASE-19999<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    this(conf, factoryId, true);<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /**<a name="line.169"></a>
+<span class="sourceLineNo">170</span>   * @param conf must not be null, will keep a reference to read params in later reader/writer<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   *          instances.<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   * @param factoryId a unique identifier for this factory. used i.e. by filesystem implementations<a name="line.172"></a>
+<span class="sourceLineNo">173</span>   *          to make a directory<a name="line.173"></a>
+<span class="sourceLineNo">174</span>   * @param enableSyncReplicationWALProvider whether wrap the wal provider to a<a name="line.174"></a>
+<span class="sourceLineNo">175</span>   *          {@link SyncReplicationWALProvider}<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   */<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  public WALFactory(Configuration conf, String factoryId, boolean enableSyncReplicationWALProvider)<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      throws IOException {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    // until we've moved reader/writer construction down into providers, this initialization must<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    // happen prior to provider initialization, in case they need to instantiate a reader/writer.<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    timeoutMillis = conf.getInt("hbase.hlog.open.timeout", 300000);<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    /* TODO Both of these are probably specific to the fs wal provider */<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      AbstractFSWALProvider.Reader.class);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    this.conf = conf;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    this.factoryId = factoryId;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    // end required early initialization<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    if (conf.getBoolean("hbase.regionserver.hlog.enabled", true)) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      WALProvider provider = createProvider(getProviderClass(WAL_PROVIDER, DEFAULT_WAL_PROVIDER));<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      if (enableSyncReplicationWALProvider) {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        provider = new SyncReplicationWALProvider(provider);<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      provider.init(this, conf, null);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      this.provider = provider;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    } else {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      // special handling of existing configuration behavior.<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      LOG.warn("Running with WAL disabled.");<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      provider = new DisabledWALProvider();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      provider.init(this, conf, factoryId);<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>  /**<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * Shutdown all WALs and clean up any underlying storage.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * Use only when you will not need to replay and edits that have gone to any wals from this<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * factory.<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public void close() throws IOException {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (null != metaProvider) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      metaProvider.close();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    }<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    // close is called on a WALFactory with null provider in the case of contention handling<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    // within the getInstance method.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    if (null != provider) {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      provider.close();<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    }<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  }<a name="line.219"></a>
+<span class="sourceLineNo">220</span><a name="line.220"></a>
+<span class="sourceLineNo">221</span>  /**<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * Tell the underlying WAL providers to shut down, but do not clean up underlying storage.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   * If you are not ending cleanly and will need to replay edits from this factory's wals,<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * use this method if you can as it will try to leave things as tidy as possible.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   */<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  public void shutdown() throws IOException {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    IOException exception = null;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    final WALProvider metaProvider = this.metaProvider.get();<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    if (null != metaProvider) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      try {<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        metaProvider.shutdown();<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      } catch(IOException ioe) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>        exception = ioe;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      }<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    provider.shutdown();<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    if (null != exception) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      throw exception;<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    }<a name="line.239"></a>
+<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public List&lt;WAL&gt; getWALs() {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    return provider.getWALs();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  @VisibleForTesting<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  WALProvider getMetaProvider() throws IOException {<a name="line.247"></a>
 <span class="sourceLineNo">248</span>    for (;;) {<a name="line.248"></a>
 <span class="sourceLineNo">249</span>      WALProvider provider = this.metaProvider.get();<a name="line.249"></a>
 <span class="sourceLineNo">250</span>      if (provider != null) {<a name="line.250"></a>
 <span class="sourceLineNo">251</span>        return provider;<a name="line.251"></a>
 <span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      provider = createProvider(getProviderClass(META_WAL_PROVIDER, DEFAULT_META_WAL_PROVIDER));<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      provider.init(this, conf, AbstractFSWALProvider.META_WAL_PROVIDER_ID);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      if (metaProvider.compareAndSet(null, provider)) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        return provider;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      } else {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        // someone is ahead of us, close and try again.<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        provider.close();<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  }<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>  /**<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * @param region the region which we want to get a WAL for it. Could be null.<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   */<a name="line.267"></a>
-<span class="sourceLineNo">268</span>  public WAL getWAL(RegionInfo region) throws IOException {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    // use different WAL for hbase:meta<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    if (region != null &amp;&amp; region.isMetaRegion() &amp;&amp;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      region.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return getMetaProvider().getWAL(region);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    } else {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return provider.getWAL(region);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  }<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>  public Reader createReader(final FileSystem fs, final Path path) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    return createReader(fs, path, (CancelableProgressable)null);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Create a reader for the WAL. If you are reading from a file that's being written to and need<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * to reopen it multiple times, use {@link WAL.Reader#reset()} instead of this method<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * then just seek back to the last known good position.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * @return A WAL reader.  Close when done with it.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   * @throws IOException<a name="line.287"></a>
-<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  public Reader createReader(final FileSystem fs, final Path path,<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      CancelableProgressable reporter) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    return createReader(fs, path, reporter, true);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public Reader createReader(final FileSystem fs, final Path path, CancelableProgressable reporter,<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      boolean allowCustom) throws IOException {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    Class&lt;? extends AbstractFSWALProvider.Reader&gt; lrClass =<a name="line.296"></a>
-<span class="sourceLineNo">297</span>        allowCustom ? logReaderClass : ProtobufLogReader.class;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      // A wal file could be under recovery, so it may take several<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      // tries to get it open. Instead of claiming it is corrupted, retry<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      // to open it up to 5 minutes by default.<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      long startWaiting = EnvironmentEdgeManager.currentTime();<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      long openTimeout = timeoutMillis + startWaiting;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      int nbAttempt = 0;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      AbstractFSWALProvider.Reader reader = null;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      while (true) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          reader = lrClass.getDeclaredConstructor().newInstance();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>          reader.init(fs, path, conf, null);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          return reader;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        } catch (IOException e) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>          if (reader != null) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>            try {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>              reader.close();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            } catch (IOException exception) {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>              LOG.warn("Could not close FSDataInputStream" + exception.getMessage());<a name="line.316"></a>
-<span class="sourceLineNo">317</span>              LOG.debug("exception details", exception);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>            }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>          }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>          String msg = e.getMessage();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>          if (msg != null<a name="line.322"></a>
-<span class="sourceLineNo">323</span>              &amp;&amp; (msg.contains("Cannot obtain block length")<a name="line.323"></a>
-<span class="sourceLineNo">324</span>                  || msg.contains("Could not obtain the last block") || msg<a name="line.324"></a>
-<span class="sourceLineNo">325</span>                    .matches("Blocklist for [^ ]* has changed.*"))) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (++nbAttempt == 1) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              LOG.warn("Lease should have recovered. This is not expected. Will retry", e);<a name="line.327"></a>
-<span class="sourceLineNo">328</span>            }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new InterruptedIOException("Operation is cancelled");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>            }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            if (nbAttempt &gt; 2 &amp;&amp; openTimeout &lt; EnvironmentEdgeManager.currentTime()) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>              LOG.error("Can't open after " + nbAttempt + " attempts and "<a name="line.333"></a>
-<span class="sourceLineNo">334</span>                  + (EnvironmentEdgeManager.currentTime() - startWaiting) + "ms " + " for " + path);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>            } else {<a name="line.335"></a>
-<span class="sourceLineNo">336</span>              try {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>                Thread.sleep(nbAttempt &lt; 3 ? 500 : 1000);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>                continue; // retry<a name="line.338"></a>
-<span class="sourceLineNo">339</span>              } catch (InterruptedException ie) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>                InterruptedIOException iioe = new InterruptedIOException();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>                iioe.initCause(ie);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>                throw iioe;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>              }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>            }<a name="line.344"></a>
-<span class="sourceLineNo">345</span>            throw new LeaseNotRecoveredException(e);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          } else {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>            throw e;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          }<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        }<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      }<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    } catch (IOException ie) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      throw ie;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (Exception e) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IOException("Cannot get log reader", e);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span>  }<a name="line.356"></a>
-<span class="sourceLineNo">357</span><a name="line.357"></a>
-<span class="sourceLineNo">358</span>  /**<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * Create a writer for the WAL.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * Uses defaults.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * &lt;p&gt;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Should be package-private. public only for tests and<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return A WAL writer. Close when done with it.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public Writer createWALWriter(final FileSystem fs, final Path path) throws IOException {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    return FSHLogProvider.createWriter(conf, fs, path, false);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  }<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  /**<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * Should be package-private, visible for recovery testing.<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * Uses defaults.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @return an overwritable writer for recovered edits. caller should close.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  @VisibleForTesting<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public Writer createRecoveredEditsWriter(final FileSystem fs, final Path path)<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      throws IOException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    return FSHLogProvider.createWriter(conf, fs, path, true);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
-<span class="sourceLineNo">380</span><a name="line.380"></a>
-<span class="sourceLineNo">381</span>  // These static methods are currently used where it's impractical to<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  // untangle the reliance on state in the filesystem. They rely on singleton<a name="line.382"></a>
-<span class="sourceLineNo">383</span>  // WALFactory that just provides Reader / Writers.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  // For now, first Configuration object wins. Practically this just impacts the reader/writer class<a name="line.384"></a>
-<span class="sourceLineNo">385</span>  private static final AtomicReference&lt;WALFactory&gt; singleton = new AtomicReference&lt;&gt;();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>  private static final String SINGLETON_ID = WALFactory.class.getName();<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  <a name="line.387"></a>
-<span class="sourceLineNo">388</span>  // Public only for FSHLog<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static WALFactory getInstance(Configuration configuration) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    WALFactory factory = singleton.get();<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    if (null == factory) {<a name="line.391"></a>
-<span class="sourceLineNo">392</span>      WALFactory temp = new WALFactory(configuration);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      if (singleton.compareAndSet(null, temp)) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        factory = temp;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>      } else {<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        // someone else beat us to initializing<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        try {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>          temp.close();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        } catch (IOException exception) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>          LOG.debug("failed to close temporary singleton. ignoring.", exception);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>        }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>        factory = singleton.get();<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    }<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    return factory;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return a WAL Reader, caller must close.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  public static Reader createReader(final FileSystem fs, final Path path,<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      final Configuration configuration) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return getInstance(configuration).createReader(fs, path);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @return a WAL Reader, caller must close.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   */<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  static Reader createReader(final FileSystem fs, final Path path,<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      final Configuration configuration, final CancelableProgressable reporter) throws IOException {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    return getInstance(configuration).createReader(fs, path, reporter);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>  }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>  /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>   * Create a reader for the given path, ignore custom reader classes from conf.<a name="line.429"></a>
-<span class="sourceLineNo">430</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * only public pending move of {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * @return a WAL Reader, caller must close.<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   */<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  public static Reader createReaderIgnoreCustomClass(final FileSystem fs, final Path path,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      final Configuration configuration) throws IOException {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return getInstance(configuration).createReader(fs, path, null, false);<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  /**<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * Uses defaults.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @return a Writer that will overwrite files. Caller must close.<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  static Writer createRecoveredEditsWriter(final FileSystem fs, final Path path,<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      final Configuration configuration)<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      throws IOException {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    return FSHLogProvider.createWriter(configuration, fs, path, true);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  }<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>  /**<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * Uses defaults.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   * @return a writer that won't overwrite files. Caller must close.<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   */<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  @VisibleForTesting<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public static Writer createWALWriter(final FileSystem fs, final Path path,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      final Configuration configuration)<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      throws IOException {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    return FSHLogProvider.createWriter(configuration, fs, path, false);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>  }<a name="line.460"></a>
-<span class="sourceLineNo">461</span><a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public final WALProvider getWALProvider() {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    return this.provider;<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  public final WALProvider getMetaWALProvider() {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    return this.metaProvider.get();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>}<a name="line.469"></a>
+<span class="sourceLineNo">253</span>      provider = createProvider(getProviderClass(META_WAL_PROVIDER,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          conf.get(WAL_PROVIDER, DEFAULT_WAL_PROVIDER)));<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      provider.init(this, conf, AbstractFSWALProvider.META_WAL_PROVIDER_ID);<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      provider.addWALActionsListener(new MetricsWAL());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      if (metaProvider.compareAndSet(null, provider)) {<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        return provider;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      } else {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        // someone is ahead of us, close and try again.<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        provider.close();<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      }<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
+<span class="sourceLineNo">264</span>  }<a name="line.264"></a>
+<span class="sourceLineNo">265</span><a name="line.265"></a>
+<span class="sourceLineNo">266</span>  /**<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param region the region which we want to get a WAL for it. Could be null.<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   */<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  public WAL getWAL(RegionInfo region) throws IOException {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    // use different WAL for hbase:meta<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    if (region != null &amp;&amp; region.isMetaRegion() &amp;&amp;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      region.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return getMetaProvider().getWAL(region);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    } else {<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return provider.getWAL(region);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  }<a name="line.277"></a>
+<span class="sourceLineNo">278</span><a name="line.278"></a>
+<span class="sourceLineNo">279</span>  public Reader createReader(final FileSystem fs, final Path path) throws IOException {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    return createReader(fs, path, (CancelableProgressable)null);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>  /**<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * Create a reader for the WAL. If you are reading from a file that's being written to and need<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * to reopen it multiple times, use {@link WAL.Reader#reset()} instead of this method<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * then just seek back to the last known good position.<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * @return A WAL reader.  Close when done with it.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * @throws IOException<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   */<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public Reader createReader(final FileSystem fs, final Path path,<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      CancelableProgressable reporter) throws IOException {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    return createReader(fs, path, reporter, true);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  public Reader createReader(final FileSystem fs, final Path path, CancelableProgressable reporter,<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      boolean allowCustom) throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    Class&lt;? extends AbstractFSWALProvider.Reader&gt; lrClass =<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        allowCustom ? logReaderClass : ProtobufLogReader.class;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    try {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      // A wal file could be under recovery, so it may take several<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      // tries to get it open. Instead of claiming it is corrupted, retry<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      // to open it up to 5 minutes by default.<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      long startWaiting = EnvironmentEdgeManager.currentTime();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      long openTimeout = timeoutMillis + startWaiting;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      int nbAttempt = 0;<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      AbstractFSWALProvider.Reader reader = null;<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      while (true) {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          reader = lrClass.getDeclaredConstructor().newInstance();<a name="line.309"></a>
+<span class="sourceLineNo">310</span>          reader.init(fs, path, conf, null);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          return reader;<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        } catch (IOException e) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>          if (reader != null) {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>            try {<a name="line.314"></a>
+<span class="sourceLineNo">315</span>              reader.close();<a name="line.315"></a>
+<span class="sourceLineNo">316</span>            } catch (IOException exception) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>              LOG.warn("Could not close FSDataInputStream" + exception.getMessage());<a name="line.317"></a>
+<span class="sourceLineNo">318</span>              LOG.debug("exception details", exception);<a name="line.318"></a>
+<span class="sourceLineNo">319</span>            }<a name="line.319"></a>
+<span class="sourceLineNo">320</span>          }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>          String msg = e.getMessage();<a name="line.322"></a>
+<span class="sourceLineNo">323</span>          if (msg != null<a name="line.323"></a>
+<span class="sourceLineNo">324</span>              &amp;&amp; (msg.contains("Cannot obtain block length")<a name="line.324"></a>
+<span class="sourceLineNo">325</span>                  || msg.contains("Could not obtain the last block") || msg<a name="line.325"></a>
+<span class="sourceLineNo">326</span>                    .matches("Blocklist for [^ ]* has changed.*"))) {<a name="line.326"></a>
+<span class="sourceLineNo">327</span>            if (++nbAttempt == 1) {<a name="line.327"></a>
+<span class="sourceLineNo">328</span>              LOG.warn("Lease should have recovered. This is not expected. Will retry", e);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            }<a name="line.329"></a>
+<span class="sourceLineNo">330</span>            if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.330"></a>
+<span class="sourceLineNo">331</span>              throw new InterruptedIOException("Operation is cancelled");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>            if (nbAttempt &gt; 2 &amp;&amp; openTimeout &lt; EnvironmentEdgeManager.currentTime()) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>              LOG.error("Can't open after " + nbAttempt + " attempts and "<a name="line.334"></a>
+<span class="sourceLineNo">335</span>                  + (EnvironmentEdgeManager.currentTime() - startWaiting) + "ms " + " for " + path);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>            } else {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>              try {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>                Thread.sleep(nbAttempt &lt; 3 ? 500 : 1000);<a name="line.338"></a>
+<span class="sourceLineNo">339</span>                continue; // retry<a name="line.339"></a>
+<span class="sourceLineNo">340</span>              } catch (InterruptedException ie) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>                InterruptedIOException iioe = new InterruptedIOException();<a name="line.341"></a>
+<span class="sourceLineNo">342</span>                iioe.initCause(ie);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>                throw iioe;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>              }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>            }<a name="line.345"></a>
+<span class="sourceLineNo">346</span>            throw new LeaseNotRecoveredException(e);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>          } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>            throw e;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>          }<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    } catch (IOException ie) {<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      throw ie;<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    } catch (Exception e) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IOException("Cannot get log reader", e);<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
+<span class="sourceLineNo">358</span><a name="line.358"></a>
+<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   * Create a writer for the WAL.<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * Uses defaults.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * &lt;p&gt;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Should be package-private. public only for tests and<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @return A WAL writer. Close when done with it.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
+<span class="sourceLineNo">367</span>  public Writer createWALWriter(final FileSystem fs, final Path path) throws IOException {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    return FSHLogProvider.createWriter(conf, fs, path, false);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /**<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Should be package-private, visible for recovery testing.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   * Uses defaults.<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   * @return an overwritable writer for recovered edits. caller should close.<a name="line.374"></a>
+<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
+<span class="sourceLineNo">376</span>  @VisibleForTesting<a name="line.376"></a>
+<span class="sourceLineNo">377</span>  public Writer createRecoveredEditsWriter(final FileSystem fs, final Path path)<a name="line.377"></a>
+<span class="sourceLineNo">378</span>      throws IOException {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    return FSHLogProvider.createWriter(conf, fs, path, true);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  // These static methods are currently used where it's impractical to<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // untangle the reliance on state in the filesystem. They rely on singleton<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // WALFactory that just provides Reader / Writers.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  // For now, first Configuration object wins. Practically this just impacts the reader/writer class<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  private static final AtomicReference&lt;WALFactory&gt; singleton = new AtomicReference&lt;&gt;();<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  private static final String SINGLETON_ID = WALFactory.class.getName();<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  <a name="line.388"></a>
+<span class="sourceLineNo">389</span>  // Public only for FSHLog<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  public static WALFactory getInstance(Configuration configuration) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    WALFactory factory = singleton.get();<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    if (null == factory) {<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      WALFactory temp = new WALFactory(configuration);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>      if (singleton.compareAndSet(null, temp)) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>        factory = temp;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>      } else {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>        // someone else beat us to initializing<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        try {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>          temp.close();<a name="line.399"></a>
+<span class="sourceLineNo">400</span>        } catch (IOException exception) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>          LOG.debug("failed to close temporary singleton. ignoring.", exception);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        }<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        factory = singleton.get();<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      }<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    return factory;<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @return a WAL Reader, caller must close.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   */<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  public static Reader createReader(final FileSystem fs, final Path path,<a name="line.414"></a>
+<span class="sourceLineNo">415</span>      final Configuration configuration) throws IOException {<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    return getInstance(configuration).createReader(fs, path);<a name="line.416"></a>
+<span class="sourceLineNo">417</span>  }<a name="line.417"></a>
+<span class="sourceLineNo">418</span><a name="line.418"></a>
+<span class="sourceLineNo">419</span>  /**<a name="line.419"></a>
+<span class="sourceLineNo">420</span>   * Create a reader for the given path, accept custom reader classes from conf.<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   * @return a WAL Reader, caller must close.<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  static Reader createReader(final FileSystem fs, final Path path,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      final Configuration configuration, final CancelableProgressable reporter) throws IOException {<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    return getInstance(configuration).createReader(fs, path, reporter);<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  /**<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   * Create a reader for the given path, ignore custom reader classes from conf.<a name="line.430"></a>
+<span class="sourceLineNo">431</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.431"></a>
+<span class="sourceLineNo">432</span>   * only public pending move of {@link org.apache.hadoop.hbase.regionserver.wal.Compressor}<a name="line.432"></a>
+<span class="sourceLineNo">433</span>   * @return a WAL Reader, caller must close.<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
+<span class="sourceLineNo">435</span>  public static Reader createReaderIgnoreCustomClass(final FileSystem fs, final Path path,<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      final Configuration configuration) throws IOException {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    return getInstance(configuration).createReader(fs, path, null, false);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
+<span class="sourceLineNo">439</span><a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
+<span class="sourceLineNo">441</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.441"></a>
+<span class="sourceLineNo">442</span>   * Uses defaults.<a name="line.442"></a>
+<span class="sourceLineNo">443</span>   * @return a Writer that will overwrite files. Caller must close.<a name="line.443"></a>
+<span class="sourceLineNo">444</span>   */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>  static Writer createRecoveredEditsWriter(final FileSystem fs, final Path path,<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      final Configuration configuration)<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      throws IOException {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    return FSHLogProvider.createWriter(configuration, fs, path, true);<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  }<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * If you already have a WALFactory, you should favor the instance method.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   * Uses defaults.<a name="line.453"></a>
+<span class="sourceLineNo">454</span>   * @return a writer that won't overwrite files. Caller must close.<a name="line.454"></a>
+<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
+<span class="sourceLineNo">456</span>  @VisibleForTesting<a name="line.456"></a>
+<span class="sourceLineNo">457</span>  public static Writer createWALWriter(final FileSystem fs, final Path path,<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      final Configuration configuration)<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      throws IOException {<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    return FSHLogProvider.createWriter(configuration, fs, path, false);<a name="line.460"></a>
+<span class="sourceLineNo">461</span>  }<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public final WALProvider getWALProvider() {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    return this.provider;<a name="line.464"></a>
+<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  public final WALProvider getMetaWALProvider() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return this.metaProvider.get();<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span>}<a name="line.470"></a>
 
 
 


[24/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class=

<TRUNCATED>

[51/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/7cf6034b
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/7cf6034b
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/7cf6034b

Branch: refs/heads/asf-site
Commit: 7cf6034ba8af6b95a6dda060a311d501f809196c
Parents: a44d796
Author: jenkins <bu...@apache.org>
Authored: Thu Aug 2 19:51:25 2018 +0000
Committer: jenkins <bu...@apache.org>
Committed: Thu Aug 2 19:51:25 2018 +0000

----------------------------------------------------------------------
 acid-semantics.html                             |     4 +-
 apache_hbase_reference_guide.pdf                | 28731 +++++++++--------
 book.html                                       |   120 +-
 bulk-loads.html                                 |     4 +-
 checkstyle-aggregate.html                       | 20154 ++++++------
 checkstyle.rss                                  |   118 +-
 coc.html                                        |     4 +-
 dependencies.html                               |     4 +-
 dependency-convergence.html                     |     4 +-
 dependency-info.html                            |     4 +-
 dependency-management.html                      |     4 +-
 devapidocs/allclasses-frame.html                |     3 +-
 devapidocs/allclasses-noframe.html              |     3 +-
 devapidocs/constant-values.html                 |    25 +-
 devapidocs/index-all.html                       |    91 +-
 .../backup/impl/BackupSystemTable.WALItem.html  |    18 +-
 .../hbase/backup/impl/BackupSystemTable.html    |   290 +-
 .../backup/impl/IncrementalBackupManager.html   |    22 +-
 .../class-use/BackupSystemTable.WALItem.html    |     4 +-
 .../hbase/backup/master/BackupLogCleaner.html   |     6 +-
 .../hadoop/hbase/backup/package-tree.html       |     2 +-
 .../hadoop/hbase/client/package-tree.html       |    26 +-
 .../hadoop/hbase/coprocessor/package-tree.html  |     2 +-
 .../hadoop/hbase/filter/package-tree.html       |    10 +-
 .../hbase/io/hfile/CacheableDeserializer.html   |     4 +
 .../hfile/CacheableDeserializerIdManager.html   |    65 +-
 .../io/hfile/HFileBlock.BlockDeserializer.html  |   349 +
 .../io/hfile/HFileBlock.BlockIterator.html      |    10 +-
 .../io/hfile/HFileBlock.BlockWritable.html      |     6 +-
 .../hbase/io/hfile/HFileBlock.FSReader.html     |    18 +-
 .../hbase/io/hfile/HFileBlock.FSReaderImpl.html |    58 +-
 .../io/hfile/HFileBlock.PrefetchedHeader.html   |    12 +-
 .../hbase/io/hfile/HFileBlock.Writer.State.html |    12 +-
 .../hbase/io/hfile/HFileBlock.Writer.html       |    80 +-
 .../hadoop/hbase/io/hfile/HFileBlock.html       |   130 +-
 .../hfile/bucket/BucketCache.BucketEntry.html   |    97 +-
 .../bucket/BucketCache.BucketEntryGroup.html    |    18 +-
 .../hfile/bucket/BucketCache.RAMQueueEntry.html |    26 +-
 .../BucketCache.SharedMemoryBucketEntry.html    |    22 +-
 .../bucket/BucketCache.StatisticsThread.html    |     8 +-
 .../hfile/bucket/BucketCache.WriterThread.html  |    16 +-
 .../hbase/io/hfile/bucket/BucketCache.html      |   383 +-
 .../hbase/io/hfile/bucket/BucketCacheStats.html |     4 +-
 .../hbase/io/hfile/bucket/BucketProtoUtils.html |   373 +
 .../io/hfile/bucket/ByteBufferIOEngine.html     |     4 +-
 .../hadoop/hbase/io/hfile/bucket/IOEngine.html  |     4 +-
 .../hbase/io/hfile/bucket/UniqueIndexMap.html   |   378 -
 .../bucket/UnsafeSharedMemoryBucketEntry.html   |     6 +-
 .../hfile/bucket/class-use/BucketAllocator.html |     3 +-
 .../class-use/BucketAllocatorException.html     |     7 +-
 .../class-use/BucketCache.BucketEntry.html      |    18 +-
 .../io/hfile/bucket/class-use/BucketCache.html  |    13 +
 .../bucket/class-use/BucketProtoUtils.html      |   125 +
 .../bucket/class-use/CacheFullException.html    |     3 +-
 .../io/hfile/bucket/class-use/IOEngine.html     |     3 +-
 .../hfile/bucket/class-use/UniqueIndexMap.html  |   193 -
 .../hbase/io/hfile/bucket/package-frame.html    |     2 +-
 .../hbase/io/hfile/bucket/package-summary.html  |    20 +-
 .../hbase/io/hfile/bucket/package-tree.html     |     2 +-
 .../hbase/io/hfile/bucket/package-use.html      |     5 -
 .../hbase/io/hfile/class-use/BlockCacheKey.html |    15 +-
 .../hbase/io/hfile/class-use/BlockPriority.html |    13 +
 .../hbase/io/hfile/class-use/BlockType.html     |    38 +
 .../hfile/class-use/Cacheable.MemoryType.html   |     6 +
 .../hbase/io/hfile/class-use/Cacheable.html     |    14 +-
 .../hfile/class-use/CacheableDeserializer.html  |    27 +-
 .../class-use/HFileBlock.BlockDeserializer.html |   125 +
 .../hbase/io/hfile/class-use/HFileBlock.html    |    10 +
 .../hadoop/hbase/io/hfile/package-frame.html    |     1 +
 .../hadoop/hbase/io/hfile/package-summary.html  |    71 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     7 +-
 .../hadoop/hbase/io/hfile/package-use.html      |    11 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |     2 +-
 .../hadoop/hbase/mapreduce/package-tree.html    |     2 +-
 .../hadoop/hbase/master/MasterRpcServices.html  |   170 +-
 .../hadoop/hbase/master/package-tree.html       |     6 +-
 .../master/procedure/DisableTableProcedure.html |     8 +-
 .../hbase/master/procedure/package-tree.html    |     4 +-
 .../hadoop/hbase/monitoring/package-tree.html   |     2 +-
 .../hadoop/hbase/nio/class-use/ByteBuff.html    |    10 +
 .../org/apache/hadoop/hbase/package-tree.html   |    14 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     4 +-
 .../hadoop/hbase/quotas/package-tree.html       |     6 +-
 .../hadoop/hbase/regionserver/package-tree.html |    16 +-
 .../regionserver/querymatcher/package-tree.html |     4 +-
 .../hbase/regionserver/wal/package-tree.html    |     2 +-
 .../hadoop/hbase/thrift/package-tree.html       |     2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |    10 +-
 .../hbase/wal/SyncReplicationWALProvider.html   |    25 +-
 .../org/apache/hadoop/hbase/wal/WALFactory.html |    99 +-
 .../hadoop/hbase/wal/class-use/WALProvider.html |     6 +-
 devapidocs/overview-tree.html                   |     3 +-
 devapidocs/serialized-form.html                 |    28 -
 .../org/apache/hadoop/hbase/Version.html        |     6 +-
 .../backup/impl/BackupSystemTable.WALItem.html  |  3880 +--
 .../hbase/backup/impl/BackupSystemTable.html    |  3880 +--
 ...ncrementalBackupManager.NewestLogFilter.html |   228 +-
 .../backup/impl/IncrementalBackupManager.html   |   228 +-
 .../hbase/backup/master/BackupLogCleaner.html   |    89 +-
 ...stFutureImpl.ReplicaCallIssuingRunnable.html |    24 +-
 ...yncRequestFutureImpl.ReplicaResultState.html |    24 +-
 .../client/AsyncRequestFutureImpl.Retry.html    |    24 +-
 ...tFutureImpl.SingleServerRequestRunnable.html |    24 +-
 .../hbase/client/AsyncRequestFutureImpl.html    |    24 +-
 .../hfile/CacheableDeserializerIdManager.html   |    80 +-
 .../io/hfile/HFileBlock.BlockDeserializer.html  |  2186 ++
 .../io/hfile/HFileBlock.BlockIterator.html      |  3682 +--
 .../io/hfile/HFileBlock.BlockWritable.html      |  3682 +--
 .../hbase/io/hfile/HFileBlock.FSReader.html     |  3682 +--
 .../hbase/io/hfile/HFileBlock.FSReaderImpl.html |  3682 +--
 .../hbase/io/hfile/HFileBlock.Header.html       |  3682 +--
 .../io/hfile/HFileBlock.PrefetchedHeader.html   |  3682 +--
 .../hbase/io/hfile/HFileBlock.Writer.State.html |  3682 +--
 .../hbase/io/hfile/HFileBlock.Writer.html       |  3682 +--
 .../hadoop/hbase/io/hfile/HFileBlock.html       |  3682 +--
 .../hfile/bucket/BucketCache.BucketEntry.html   |  3127 +-
 .../bucket/BucketCache.BucketEntryGroup.html    |  3127 +-
 .../hfile/bucket/BucketCache.RAMQueueEntry.html |  3127 +-
 .../BucketCache.SharedMemoryBucketEntry.html    |  3127 +-
 .../bucket/BucketCache.StatisticsThread.html    |  3127 +-
 .../hfile/bucket/BucketCache.WriterThread.html  |  3127 +-
 .../hbase/io/hfile/bucket/BucketCache.html      |  3127 +-
 .../hbase/io/hfile/bucket/BucketProtoUtils.html |   263 +
 .../hbase/io/hfile/bucket/UniqueIndexMap.html   |   128 -
 .../MasterRpcServices.BalanceSwitchMode.html    |  3376 +-
 .../hadoop/hbase/master/MasterRpcServices.html  |  3376 +-
 .../AbstractStateMachineTableProcedure.html     |     2 +-
 .../master/procedure/DeleteTableProcedure.html  |    12 +-
 .../master/procedure/DisableTableProcedure.html |   135 +-
 ...edureExecutor.CompletedProcedureCleaner.html |     2 +-
 ...dureExecutor.CompletedProcedureRetainer.html |     2 +-
 .../ProcedureExecutor.FailedProcedure.html      |     2 +-
 ...ProcedureExecutor.KeepAliveWorkerThread.html |     2 +-
 ...edureExecutor.ProcedureExecutorListener.html |     2 +-
 .../procedure2/ProcedureExecutor.Testing.html   |     2 +-
 .../ProcedureExecutor.WorkerMonitor.html        |     2 +-
 .../ProcedureExecutor.WorkerThread.html         |     2 +-
 .../hbase/procedure2/ProcedureExecutor.html     |     2 +-
 .../HRegion.BatchOperation.Visitor.html         |     2 +-
 .../regionserver/HRegion.BatchOperation.html    |     2 +-
 .../regionserver/HRegion.BulkLoadListener.html  |     2 +-
 .../HRegion.FlushResult.Result.html             |     2 +-
 .../hbase/regionserver/HRegion.FlushResult.html |     2 +-
 .../regionserver/HRegion.FlushResultImpl.html   |     2 +-
 .../HRegion.MutationBatchOperation.html         |     2 +-
 .../HRegion.ObservedExceptionsInBatch.html      |     2 +-
 .../HRegion.PrepareFlushResult.html             |     2 +-
 .../regionserver/HRegion.RegionScannerImpl.html |     2 +-
 .../HRegion.ReplayBatchOperation.html           |     2 +-
 .../regionserver/HRegion.RowLockContext.html    |     2 +-
 .../hbase/regionserver/HRegion.RowLockImpl.html |     2 +-
 .../hbase/regionserver/HRegion.WriteState.html  |     2 +-
 .../hadoop/hbase/regionserver/HRegion.html      |     2 +-
 ....DefaultSyncReplicationPeerInfoProvider.html |     8 +-
 .../hbase/wal/SyncReplicationWALProvider.html   |     8 +-
 .../hadoop/hbase/wal/WALFactory.Providers.html  |   761 +-
 .../org/apache/hadoop/hbase/wal/WALFactory.html |   761 +-
 downloads.html                                  |     4 +-
 export_control.html                             |     4 +-
 index.html                                      |     4 +-
 integration.html                                |     4 +-
 issue-tracking.html                             |     4 +-
 license.html                                    |     4 +-
 mail-lists.html                                 |     4 +-
 metrics.html                                    |     4 +-
 old_news.html                                   |     4 +-
 plugin-management.html                          |     4 +-
 plugins.html                                    |     4 +-
 poweredbyhbase.html                             |     4 +-
 project-info.html                               |     4 +-
 project-reports.html                            |     4 +-
 project-summary.html                            |     4 +-
 pseudo-distributed.html                         |     4 +-
 replication.html                                |     4 +-
 resources.html                                  |     4 +-
 source-repository.html                          |     4 +-
 sponsors.html                                   |     4 +-
 supportingprojects.html                         |     4 +-
 team-list.html                                  |     4 +-
 testapidocs/index-all.html                      |    10 +
 .../apache/hadoop/hbase/MiniHBaseCluster.html   |   324 +-
 .../apache/hadoop/hbase/MiniHBaseCluster.html   |  1768 +-
 testdevapidocs/allclasses-frame.html            |     1 +
 testdevapidocs/allclasses-noframe.html          |     1 +
 testdevapidocs/constant-values.html             |    61 +
 testdevapidocs/index-all.html                   |    80 +
 ...ter.ServerNameIgnoreStartCodeComparator.html |     6 +-
 .../hadoop/hbase/DistributedHBaseCluster.html   |   212 +-
 .../org/apache/hadoop/hbase/HBaseCluster.html   |   253 +-
 ...lusterManager.CommandProvider.Operation.html |    12 +-
 .../HBaseClusterManager.CommandProvider.html    |    12 +-
 ...lusterManager.HBaseShellCommandProvider.html |    10 +-
 ...usterManager.HadoopShellCommandProvider.html |    10 +-
 .../hbase/HBaseClusterManager.RemoteShell.html  |    20 +-
 ...erManager.ZookeeperShellCommandProvider.html |    12 +-
 .../hadoop/hbase/HBaseClusterManager.html       |    28 +-
 ...aseCluster.MiniHBaseClusterRegionServer.html |    22 +-
 ...eCluster.SingleFileSystemShutdownThread.html |     8 +-
 .../apache/hadoop/hbase/MiniHBaseCluster.html   |   332 +-
 .../hadoop/hbase/backup/package-tree.html       |     2 +-
 .../chaos/actions/Action.ActionContext.html     |    12 +-
 .../hadoop/hbase/chaos/actions/Action.html      |   268 +-
 .../hbase/chaos/actions/AddColumnAction.html    |     4 +-
 .../chaos/actions/BatchRestartRsAction.html     |     6 +-
 .../chaos/actions/ChangeBloomFilterAction.html  |     4 +-
 .../chaos/actions/ChangeCompressionAction.html  |     4 +-
 .../chaos/actions/ChangeEncodingAction.html     |     4 +-
 .../chaos/actions/ChangeSplitPolicyAction.html  |     4 +-
 .../chaos/actions/ChangeVersionsAction.html     |     4 +-
 .../hbase/chaos/actions/CompactMobAction.html   |     4 +-
 .../CompactRandomRegionOfTableAction.html       |     4 +-
 .../hbase/chaos/actions/CompactTableAction.html |     4 +-
 .../actions/DecreaseMaxHFileSizeAction.html     |     4 +-
 .../chaos/actions/DumpClusterStatusAction.html  |     4 +-
 .../actions/FlushRandomRegionOfTableAction.html |     4 +-
 .../hbase/chaos/actions/FlushTableAction.html   |     4 +-
 .../chaos/actions/ForceBalancerAction.html      |     4 +-
 ...MergeRandomAdjacentRegionsOfTableAction.html |     4 +-
 .../actions/MoveRandomRegionOfTableAction.html  |     4 +-
 .../chaos/actions/MoveRegionsOfTableAction.html |     4 +-
 .../hbase/chaos/actions/RemoveColumnAction.html |     4 +-
 .../chaos/actions/RestartActionBaseAction.html  |    34 +-
 .../actions/RestartActiveMasterAction.html      |    10 +-
 .../actions/RestartActiveNameNodeAction.html    |   407 +
 .../actions/RestartRandomDataNodeAction.html    |    10 +-
 .../chaos/actions/RestartRandomRsAction.html    |     6 +-
 .../RestartRandomRsExceptMetaAction.html        |     6 +-
 .../actions/RestartRandomZKNodeAction.html      |     6 +-
 .../actions/RestartRsHoldingMetaAction.html     |     6 +-
 .../actions/RestartRsHoldingTableAction.html    |     6 +-
 .../actions/RollingBatchRestartRsAction.html    |     6 +-
 .../RollingBatchRestartRsExceptMetaAction.html  |     6 +-
 .../chaos/actions/SnapshotTableAction.html      |     4 +-
 .../actions/SplitAllRegionOfTableAction.html    |     4 +-
 .../actions/SplitRandomRegionOfTableAction.html |     4 +-
 .../actions/TestChangeSplitPolicyAction.html    |     4 +-
 .../chaos/actions/TruncateTableAction.html      |     4 +-
 .../UnbalanceKillAndRebalanceAction.html        |     4 +-
 .../chaos/actions/UnbalanceRegionsAction.html   |     4 +-
 .../hbase/chaos/actions/class-use/Action.html   |    34 +-
 .../class-use/RestartActionBaseAction.html      |    20 +-
 .../class-use/RestartActiveNameNodeAction.html  |   125 +
 .../hbase/chaos/actions/package-frame.html      |     1 +
 .../hbase/chaos/actions/package-summary.html    |    34 +-
 .../hbase/chaos/actions/package-tree.html       |     1 +
 .../CacheTestUtils.ByteArrayCacheable.html      |    18 +-
 .../io/hfile/CacheTestUtils.HFileBlockPair.html |    12 +-
 .../hadoop/hbase/io/hfile/CacheTestUtils.html   |     4 +-
 .../TestBucketCache.MockedBucketCache.html      |     8 +-
 .../hbase/io/hfile/bucket/TestBucketCache.html  |    80 +-
 .../TestBucketWriterThread.MockBucketCache.html |     6 +-
 .../io/hfile/bucket/TestBucketWriterThread.html |    30 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    10 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     2 +-
 .../TestShutdownWhileWALBroken.html             |     4 +-
 .../hadoop/hbase/regionserver/package-tree.html |     4 +-
 .../regionserver/wal/TestAsyncLogRolling.html   |     2 +-
 .../hbase/regionserver/wal/TestLogRolling.html  |     6 +-
 .../apache/hadoop/hbase/test/package-tree.html  |     6 +-
 .../TestWALFactory.DumbWALActionsListener.html  |    10 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.html |   118 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |     2 +-
 testdevapidocs/overview-tree.html               |     1 +
 testdevapidocs/serialized-form.html             |    28 -
 ...ter.ServerNameIgnoreStartCodeComparator.html |   661 +-
 .../hadoop/hbase/DistributedHBaseCluster.html   |   661 +-
 .../org/apache/hadoop/hbase/HBaseCluster.html   |   739 +-
 ...lusterManager.CommandProvider.Operation.html |   580 +-
 .../HBaseClusterManager.CommandProvider.html    |   580 +-
 ...lusterManager.HBaseShellCommandProvider.html |   580 +-
 ...usterManager.HadoopShellCommandProvider.html |   580 +-
 .../hbase/HBaseClusterManager.RemoteShell.html  |   580 +-
 ...erManager.ZookeeperShellCommandProvider.html |   580 +-
 .../hadoop/hbase/HBaseClusterManager.html       |   580 +-
 ...aseCluster.MiniHBaseClusterRegionServer.html |  1768 +-
 ...eCluster.SingleFileSystemShutdownThread.html |  1768 +-
 .../apache/hadoop/hbase/MiniHBaseCluster.html   |  1768 +-
 .../chaos/actions/Action.ActionContext.html     |   588 +-
 .../hadoop/hbase/chaos/actions/Action.html      |   588 +-
 .../chaos/actions/RestartActionBaseAction.html  |    14 +-
 .../actions/RestartActiveNameNodeAction.html    |   162 +
 .../CacheTestUtils.ByteArrayCacheable.html      |   245 +-
 .../io/hfile/CacheTestUtils.HFileBlockPair.html |   245 +-
 .../hadoop/hbase/io/hfile/CacheTestUtils.html   |   245 +-
 .../TestBucketCache.MockedBucketCache.html      |   860 +-
 .../hbase/io/hfile/bucket/TestBucketCache.html  |   860 +-
 .../TestBucketWriterThread.MockBucketCache.html |   307 +-
 .../io/hfile/bucket/TestBucketWriterThread.html |   307 +-
 ...stShutdownWhileWALBroken.MyRegionServer.html |    91 +-
 .../TestShutdownWhileWALBroken.html             |    91 +-
 .../regionserver/wal/TestAsyncLogRolling.html   |    43 +-
 .../hbase/regionserver/wal/TestLogRolling.html  |   585 +-
 .../TestWALFactory.DumbWALActionsListener.html  |  1356 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.html |  1356 +-
 294 files changed, 78763 insertions(+), 72825 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index 973237a..10279a9 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -601,7 +601,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 


[37/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
index c26e1d4..1ddba78 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -1132,7 +1132,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteColumn</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.621">deleteColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.623">deleteColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest&nbsp;req)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1149,7 +1149,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.641">deleteNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.643">deleteNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1166,7 +1166,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteSnapshot</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.662">deleteSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.664">deleteSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest&nbsp;request)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Execute Delete Snapshot operation.</div>
@@ -1188,7 +1188,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.677">deleteTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.679">deleteTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest&nbsp;request)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1205,7 +1205,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>truncateTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.689">truncateTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.691">truncateTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1222,7 +1222,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>disableTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.704">disableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.706">disableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest&nbsp;request)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1239,7 +1239,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>enableCatalogJanitor</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.718">enableCatalogJanitor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.720">enableCatalogJanitor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest&nbsp;req)
                                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1256,7 +1256,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setCleanerChoreRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.726">setCleanerChoreRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.728">setCleanerChoreRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest&nbsp;req)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1273,7 +1273,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>enableTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.738">enableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.740">enableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest&nbsp;request)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1290,7 +1290,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>mergeTableRegions</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.752">mergeTableRegions</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.754">mergeTableRegions</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest&nbsp;request)
                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1307,7 +1307,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>splitRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.792">splitRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.794">splitRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest&nbsp;request)
                                                                                                     throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1324,7 +1324,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>execMasterService</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.807">execMasterService</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.809">execMasterService</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest&nbsp;request)
                                                                                                             throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1341,7 +1341,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>execProcedure</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.851">execProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.853">execProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Triggers an asynchronous attempt to run a distributed procedure.
@@ -1360,7 +1360,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>execProcedureWithRet</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.883">execProcedureWithRet</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.885">execProcedureWithRet</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest&nbsp;request)
                                                                                                           throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Triggers a synchronous attempt to run a distributed procedure and sets
@@ -1380,7 +1380,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterStatus</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.907">getClusterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.909">getClusterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest&nbsp;req)
                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1397,7 +1397,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompletedSnapshots</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.924">getCompletedSnapshots</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.926">getCompletedSnapshots</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest&nbsp;request)
                                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">List the currently available/stored snapshots. Any in-progress snapshots are ignored</div>
@@ -1415,7 +1415,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getNamespaceDescriptor</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.942">getNamespaceDescriptor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.944">getNamespaceDescriptor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest&nbsp;request)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1432,7 +1432,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getSchemaAlterStatus</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.964">getSchemaAlterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.966">getSchemaAlterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest&nbsp;req)
                                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Get the number of regions of the table that have been updated by the alter.</div>
@@ -1454,7 +1454,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableDescriptors</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.993">getTableDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.995">getTableDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest&nbsp;req)
                                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Get list of TableDescriptors for requested tables.</div>
@@ -1478,7 +1478,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableNames</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1032">getTableNames</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1034">getTableNames</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest&nbsp;req)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Get list of userspace table names</div>
@@ -1501,7 +1501,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableState</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1056">getTableState</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1058">getTableState</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1518,7 +1518,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isCatalogJanitorEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1071">isCatalogJanitorEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1073">isCatalogJanitorEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest&nbsp;req)
                                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1535,7 +1535,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isCleanerChoreEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1078">isCleanerChoreEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1080">isCleanerChoreEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest&nbsp;req)
                                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1552,7 +1552,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isMasterRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1086">isMasterRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1088">isMasterRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest&nbsp;req)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1569,7 +1569,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isProcedureDone</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1103">isProcedureDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1105">isProcedureDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Checks if the specified procedure is done.</div>
@@ -1589,7 +1589,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isSnapshotDone</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1137">isSnapshotDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1139">isSnapshotDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest&nbsp;request)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Checks if the specified snapshot is done.</div>
@@ -1611,7 +1611,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getProcedureResult</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1155">getProcedureResult</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1157">getProcedureResult</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest&nbsp;request)
                                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1628,7 +1628,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>abortProcedure</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1191">abortProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1193">abortProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest&nbsp;request)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1645,7 +1645,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>listNamespaceDescriptors</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1205">listNamespaceDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1207">listNamespaceDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest&nbsp;request)
                                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1662,7 +1662,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getProcedures</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1220">getProcedures</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1222">getProcedures</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1679,7 +1679,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLocks</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1235">getLocks</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1237">getLocks</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest&nbsp;request)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1696,7 +1696,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>listTableDescriptorsByNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1252">listTableDescriptorsByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1254">listTableDescriptorsByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest&nbsp;request)
                                                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1713,7 +1713,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>listTableNamesByNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1268">listTableNamesByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1270">listTableNamesByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest&nbsp;request)
                                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1730,7 +1730,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>modifyColumn</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1283">modifyColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1285">modifyColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest&nbsp;req)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1747,7 +1747,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>modifyNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1303">modifyNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1305">modifyNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1764,7 +1764,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>modifyTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1317">modifyTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1319">modifyTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest&nbsp;req)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1781,7 +1781,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>moveRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1332">moveRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1334">moveRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest&nbsp;req)
                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1798,7 +1798,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>offlineRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1362">offlineRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1364">offlineRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Offline specified region from master's in-memory state. It will not attempt to
@@ -1819,7 +1819,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreSnapshot</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1405">restoreSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1407">restoreSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Execute Restore/Clone snapshot operation.
@@ -1847,7 +1847,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>runCatalogScan</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1419">runCatalogScan</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1421">runCatalogScan</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest&nbsp;req)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1864,7 +1864,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>runCleanerChore</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1430">runCleanerChore</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1432">runCleanerChore</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest&nbsp;req)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1881,7 +1881,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setBalancerRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1438">setBalancerRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1440">setBalancerRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest&nbsp;req)
                                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1898,7 +1898,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1451">shutdown</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1453">shutdown</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest&nbsp;request)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1915,7 +1915,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshot</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1468">snapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1470">snapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest&nbsp;request)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Triggers an asynchronous attempt to take a snapshot.
@@ -1934,7 +1934,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1493">stopMaster</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1495">stopMaster</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest&nbsp;request)
                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1951,7 +1951,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isMasterInMaintenanceMode</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1506">isMasterInMaintenanceMode</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1508">isMasterInMaintenanceMode</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest&nbsp;request)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1968,7 +1968,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>unassignRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1519">unassignRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1521">unassignRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest&nbsp;req)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1985,7 +1985,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>reportRegionStateTransition</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1560">reportRegionStateTransition</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1562">reportRegionStateTransition</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest&nbsp;req)
                                                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2002,7 +2002,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setQuota</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1571">setQuota</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1573">setQuota</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest&nbsp;req)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2019,7 +2019,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastMajorCompactionTimestamp</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1582">getLastMajorCompactionTimestamp</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1584">getLastMajorCompactionTimestamp</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                        org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest&nbsp;request)
                                                                                                                                 throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2036,7 +2036,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastMajorCompactionTimestampForRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1597">getLastMajorCompactionTimestampForRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1599">getLastMajorCompactionTimestampForRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest&nbsp;request)
                                                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2053,7 +2053,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>compactRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1621">compactRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1623">compactRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                          org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest&nbsp;request)
                                                                                                   throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Compact a region on the master.</div>
@@ -2076,7 +2076,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHFileFormatVersionForMob</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1643">checkHFileFormatVersionForMob</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1645">checkHFileFormatVersionForMob</a>()
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">check configured hfile format version before to do compaction</div>
 <dl>
@@ -2091,7 +2091,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1655">getRegionInfo</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1657">getRegionInfo</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                          org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest&nbsp;request)
                                                                                                   throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2110,7 +2110,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>compactMob</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1680">compactMob</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest&nbsp;request,
+<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1682">compactMob</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest&nbsp;request,
                                                                                                        <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                                                                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Compacts the mob files in the current table.</div>
@@ -2131,7 +2131,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isBalancerEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1726">isBalancerEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1728">isBalancerEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest&nbsp;request)
                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2148,7 +2148,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setSplitOrMergeEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1734">setSplitOrMergeEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1736">setSplitOrMergeEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest&nbsp;request)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2165,7 +2165,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isSplitOrMergeEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1761">isSplitOrMergeEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1763">isSplitOrMergeEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest&nbsp;request)
                                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2182,7 +2182,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>normalize</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1769">normalize</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1771">normalize</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest&nbsp;request)
                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2199,7 +2199,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setNormalizerRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1780">setNormalizerRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1782">setNormalizerRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest&nbsp;request)
                                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2216,7 +2216,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isNormalizerEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1797">isNormalizerEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1799">isNormalizerEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest&nbsp;request)
                                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2233,7 +2233,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getSecurityCapabilities</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1808">getSecurityCapabilities</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1810">getSecurityCapabilities</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest&nbsp;request)
                                                                                                                     throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Returns the security capabilities in effect on the cluster</div>
@@ -2251,7 +2251,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>hasAccessControlServiceCoprocessor</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1847">hasAccessControlServiceCoprocessor</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterCoprocessorHost.html" title="class in org.apache.hadoop.hbase.master">MasterCoprocessorHost</a>&nbsp;cpHost)</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1849">hasAccessControlServiceCoprocessor</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterCoprocessorHost.html" title="class in org.apache.hadoop.hbase.master">MasterCoprocessorHost</a>&nbsp;cpHost)</pre>
 <div class="block">Determines if there is a MasterCoprocessor deployed which implements
  <code>AccessControlProtos.AccessControlService.Interface</code>.</div>
 </li>
@@ -2262,7 +2262,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>hasVisibilityLabelsServiceCoprocessor</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1856">hasVisibilityLabelsServiceCoprocessor</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterCoprocessorHost.html" title="class in org.apache.hadoop.hbase.master">MasterCoprocessorHost</a>&nbsp;cpHost)</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1858">hasVisibilityLabelsServiceCoprocessor</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterCoprocessorHost.html" title="class in org.apache.hadoop.hbase.master">MasterCoprocessorHost</a>&nbsp;cpHost)</pre>
 <div class="block">Determines if there is a MasterCoprocessor deployed which implements
  <code>VisibilityLabelsProtos.VisibilityLabelsService.Interface</code>.</div>
 </li>
@@ -2273,7 +2273,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>checkCoprocessorWithService</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1866">checkCoprocessorWithService</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MasterCoprocessor.html" title="interface in org.apache.hadoop.hbase.coprocessor">MasterCoprocessor</a>&gt;&nbsp;coprocessorsToCheck,
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1868">checkCoprocessorWithService</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MasterCoprocessor.html" title="interface in org.apache.hadoop.hbase.coprocessor">MasterCoprocessor</a>&gt;&nbsp;coprocessorsToCheck,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;service)</pre>
 <div class="block">Determines if there is a coprocessor implementation in the provided argument which extends
  or implements the provided <code>service</code>.</div>
@@ -2285,7 +2285,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>convert</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client">MasterSwitchType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1879">convert</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType&nbsp;switchType)</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client">MasterSwitchType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1881">convert</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType&nbsp;switchType)</pre>
 </li>
 </ul>
 <a name="addReplicationPeer-org.apache.hbase.thirdparty.com.google.protobuf.RpcController-org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest-">
@@ -2294,7 +2294,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>addReplicationPeer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1892">addReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1894">addReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                          org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest&nbsp;request)
                                                                                                                   throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2311,7 +2311,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>removeReplicationPeer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1905">removeReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1907">removeReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest&nbsp;request)
                                                                                                                         throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2328,7 +2328,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>enableReplicationPeer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1916">enableReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1918">enableReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest&nbsp;request)
                                                                                                                         throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2345,7 +2345,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>disableReplicationPeer</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1927">disableReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1929">disableReplicationPeer</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                  org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest&nbsp;request)
                                                                                                                           throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2362,7 +2362,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicationPeerConfig</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1938">getReplicationPeerConfig</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1940">getReplicationPeerConfig</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                      org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest&nbsp;request)
                                                                                                                               throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2379,7 +2379,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>updateReplicationPeerConfig</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1954">updateReplicationPeerConfig</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1956">updateReplicationPeerConfig</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                            org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest&nbsp;request)
                                                                                                                                     throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2396,7 +2396,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>transitReplicationPeerSyncReplicationState</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1967">transitReplicationPeerSyncReplicationState</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1969">transitReplicationPeerSyncReplicationState</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                                                          org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateRequest&nbsp;request)
                                                                                                              

<TRUNCATED>

[40/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html
index f1836f9..909e074 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" target="_top">Frames</a></li>
@@ -372,7 +372,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStat
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" target="_top">Frames</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html
new file mode 100644
index 0000000..65769c4
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html
@@ -0,0 +1,373 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc -->
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>BucketProtoUtils (Apache HBase 3.0.0-SNAPSHOT API)</title>
+<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
+<script type="text/javascript" src="../../../../../../../script.js"></script>
+</head>
+<body>
+<script type="text/javascript"><!--
+    try {
+        if (location.href.indexOf('is-external=true') == -1) {
+            parent.document.title="BucketProtoUtils (Apache HBase 3.0.0-SNAPSHOT API)";
+        }
+    }
+    catch(err) {
+    }
+//-->
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar.top">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.top.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="class-use/BucketProtoUtils.html">Use</a></li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" target="_top">Frames</a></li>
+<li><a href="BucketProtoUtils.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li>Nested&nbsp;|&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.detail">Method</a></li>
+</ul>
+</div>
+<a name="skip.navbar.top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<!-- ======== START OF CLASS DATA ======== -->
+<div class="header">
+<div class="subTitle">org.apache.hadoop.hbase.io.hfile.bucket</div>
+<h2 title="Class BucketProtoUtils" class="title">Class BucketProtoUtils</h2>
+</div>
+<div class="contentContainer">
+<ul class="inheritance">
+<li><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
+<li>
+<ul class="inheritance">
+<li>org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils</li>
+</ul>
+</li>
+</ul>
+<div class="description">
+<ul class="blockList">
+<li class="blockList">
+<hr>
+<br>
+<pre>@InterfaceAudience.Private
+final class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.37">BucketProtoUtils</a>
+extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
+</li>
+</ul>
+</div>
+<div class="summary">
+<ul class="blockList">
+<li class="blockList">
+<!-- ======== CONSTRUCTOR SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor.summary">
+<!--   -->
+</a>
+<h3>Constructor Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
+<caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier</th>
+<th class="colLast" scope="col">Constructor and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>private </code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#BucketProtoUtils--">BucketProtoUtils</a></span>()</code>&nbsp;</td>
+</tr>
+</table>
+</li>
+</ul>
+<!-- ========== METHOD SUMMARY =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method.summary">
+<!--   -->
+</a>
+<h3>Method Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
+<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tr id="i0" class="altColor">
+<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPb-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType-">fromPb</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType&nbsp;blockType)</code>&nbsp;</td>
+</tr>
+<tr id="i1" class="rowColor">
+<td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPB-java.util.Map-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap-">fromPB</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;deserializers,
+      org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap&nbsp;backingMap)</code>&nbsp;</td>
+</tr>
+<tr id="i2" class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key)</code>&nbsp;</td>
+</tr>
+<tr id="i3" class="rowColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockPriority-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;p)</code>&nbsp;</td>
+</tr>
+<tr id="i4" class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockType-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>&nbsp;</td>
+</tr>
+<tr id="i5" class="rowColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;entry)</code>&nbsp;</td>
+</tr>
+<tr id="i6" class="altColor">
+<td class="colFirst"><code>(package private) static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;cache)</code>&nbsp;</td>
+</tr>
+<tr id="i7" class="rowColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-java.util.Map-">toPB</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;backingMap)</code>&nbsp;</td>
+</tr>
+</table>
+<ul class="blockList">
+<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3>
+<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--" title="class or interface in java.lang">getClass</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--" title="class or interface in java.lang">notify</a>, <a href="https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--" title="class or interface in java.lang">wait</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-" title="class or interface in java.lang">wait</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-" title="class or interface in java.lang">wait</a></code></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+<div class="details">
+<ul class="blockList">
+<li class="blockList">
+<!-- ========= CONSTRUCTOR DETAIL ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor.detail">
+<!--   -->
+</a>
+<h3>Constructor Detail</h3>
+<a name="BucketProtoUtils--">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>BucketProtoUtils</h4>
+<pre>private&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.38">BucketProtoUtils</a>()</pre>
+</li>
+</ul>
+</li>
+</ul>
+<!-- ============ METHOD DETAIL ========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method.detail">
+<!--   -->
+</a>
+<h3>Method Detail</h3>
+<a name="toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>toPB</h4>
+<pre>static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.42">toPB</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;cache)</pre>
+</li>
+</ul>
+<a name="toPB-java.util.Map-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>toPB</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.52">toPB</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;backingMap)</pre>
+</li>
+</ul>
+<a name="toPB-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>toPB</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.64">toPB</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key)</pre>
+</li>
+</ul>
+<a name="toPB-org.apache.hadoop.hbase.io.hfile.BlockType-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>toPB</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.73">toPB</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+</li>
+</ul>
+<a name="toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>toPB</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.104">toPB</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;entry)</pre>
+</li>
+</ul>
+<a name="toPB-org.apache.hadoop.hbase.io.hfile.BlockPriority-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>toPB</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.114">toPB</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;p)</pre>
+</li>
+</ul>
+<a name="fromPB-java.util.Map-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>fromPB</h4>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.127">fromPB</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8
 /docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;deserializers,
+                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap&nbsp;backingMap)
+                                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
+</li>
+</ul>
+<a name="fromPb-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType-">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>fromPb</h4>
+<pre>private static&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#line.161">fromPb</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType&nbsp;blockType)</pre>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+</div>
+<!-- ========= END OF CLASS DATA ========= -->
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar.bottom">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.bottom.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="class-use/BucketProtoUtils.html">Use</a></li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" target="_top">Frames</a></li>
+<li><a href="BucketProtoUtils.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li>Nested&nbsp;|&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.detail">Method</a></li>
+</ul>
+</div>
+<a name="skip.navbar.bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html
index fa51719..1e2490d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
@@ -509,7 +509,7 @@ So said all these, when we read a block it may be possible that the bytes of tha
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html
index 016034c..3a5826d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" target="_top">Frames</a></li>
@@ -332,7 +332,7 @@ public interface <a href="../../../../../../../src-html/org/apache/hadoop/hbase/
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" target="_top">Frames</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html
deleted file mode 100644
index 633d5e2..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html
+++ /dev/null
@@ -1,378 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>UniqueIndexMap (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="UniqueIndexMap (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":10,"i1":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/UniqueIndexMap.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" target="_top">Frames</a></li>
-<li><a href="UniqueIndexMap.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li>
-<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li>
-<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.hfile.bucket</div>
-<h2 title="Class UniqueIndexMap" class="title">Class UniqueIndexMap&lt;T&gt;</h2>
-</div>
-<div class="contentContainer">
-<ul class="inheritance">
-<li><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
-<li>
-<ul class="inheritance">
-<li>org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap&lt;T&gt;</li>
-</ul>
-</li>
-</ul>
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>All Implemented Interfaces:</dt>
-<dd><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a></dd>
-</dl>
-<hr>
-<br>
-<pre>@InterfaceAudience.Private
-public final class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.32">UniqueIndexMap</a>&lt;T&gt;
-extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
-implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a></pre>
-<div class="block">Map from type T to int and vice-versa. Used for reducing bit field item
- counts.</div>
-<dl>
-<dt><span class="seeLabel">See Also:</span></dt>
-<dd><a href="../../../../../../../serialized-form.html#org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap">Serialized Form</a></dd>
-</dl>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- =========== FIELD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="field.summary">
-<!--   -->
-</a>
-<h3>Field Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation">
-<caption><span>Fields</span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#mForwardMap">mForwardMap</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#mIndex">mIndex</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#mReverseMap">mReverseMap</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static long</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#serialVersionUID">serialVersionUID</a></span></code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-<!-- ======== CONSTRUCTOR SUMMARY ======== -->
-<ul class="blockList">
-<li class="blockList"><a name="constructor.summary">
-<!--   -->
-</a>
-<h3>Constructor Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
-<caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colOne" scope="col">Constructor and Description</th>
-</tr>
-<tr class="altColor">
-<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#UniqueIndexMap--">UniqueIndexMap</a></span>()</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code>(package private) int</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#map-T-">map</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&nbsp;parameter)</code>&nbsp;</td>
-</tr>
-<tr id="i1" class="rowColor">
-<td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#unmap-int-">unmap</a></span>(int&nbsp;leni)</code>&nbsp;</td>
-</tr>
-</table>
-<ul class="blockList">
-<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
-<!--   -->
-</a>
-<h3>Methods inherited from class&nbsp;java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3>
-<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--" title="class or interface in java.lang">getClass</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--" title="class or interface in java.lang">notify</a>, <a href="https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--" title="class or interface in java.lang">wait</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-" title="class or interface in java.lang">wait</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-" title="class or interface in java.lang">wait</a></code></li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ FIELD DETAIL =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="field.detail">
-<!--   -->
-</a>
-<h3>Field Detail</h3>
-<a name="serialVersionUID">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.33">serialVersionUID</a></pre>
-<dl>
-<dt><span class="seeLabel">See Also:</span></dt>
-<dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap.serialVersionUID">Constant Field Values</a></dd>
-</dl>
-</li>
-</ul>
-<a name="mForwardMap">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>mForwardMap</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.35">mForwardMap</a></pre>
-</li>
-</ul>
-<a name="mReverseMap">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>mReverseMap</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.36">mReverseMap</a></pre>
-</li>
-</ul>
-<a name="mIndex">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>mIndex</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.37">mIndex</a></pre>
-</li>
-</ul>
-</li>
-</ul>
-<!-- ========= CONSTRUCTOR DETAIL ======== -->
-<ul class="blockList">
-<li class="blockList"><a name="constructor.detail">
-<!--   -->
-</a>
-<h3>Constructor Detail</h3>
-<a name="UniqueIndexMap--">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>UniqueIndexMap</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.32">UniqueIndexMap</a>()</pre>
-</li>
-</ul>
-</li>
-</ul>
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="map-java.lang.Object-">
-<!--   -->
-</a><a name="map-T-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>map</h4>
-<pre>int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.41">map</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&nbsp;parameter)</pre>
-</li>
-</ul>
-<a name="unmap-int-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>unmap</h4>
-<pre><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="type parameter in UniqueIndexMap">T</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html#line.51">unmap</a>(int&nbsp;leni)</pre>
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/UniqueIndexMap.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" target="_top">Frames</a></li>
-<li><a href="UniqueIndexMap.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li>
-<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li>
-<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html
index 26ded4b..f0cb7c4 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li>Next&nbsp;Class</li>
 </ul>
 <ul class="navList">
@@ -231,7 +231,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></h3>
-<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#access-long-">access</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getCachedTime--">getCachedTime</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getLength--">getLength</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getPriority--">getPriority</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#offset--">offset</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase
 .io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference</a></code></li>
+<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#access-long-">access</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference--">deserializerReference</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getAccessCounter--">getAccessCounter</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getCachedTime--">getCachedTime</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getLength--">getLength</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#getPriority--">getPriority</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#offset--">offset</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/hfi
 le/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">setDeserialiserReference</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
@@ -424,7 +424,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/Bu
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li>Next&nbsp;Class</li>
 </ul>
 <ul class="navList">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocator.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocator.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocator.html
index 79f5134..1a8266b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocator.html
@@ -133,9 +133,8 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
             <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
 </tr>
 </tbody>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocatorException.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocatorException.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocatorException.html
index e150876..e4749e0 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocatorException.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketAllocatorException.html
@@ -116,14 +116,9 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private void</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#retrieveFromFile-int:A-">retrieveFromFile</a></span>(int[]&nbsp;bucketSizes)</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
             <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
 </tr>
 </tbody>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.BucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.BucketEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.BucketEntry.html
index 965cf37..22acedd 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.BucketEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.BucketEntry.html
@@ -145,9 +145,8 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
             <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
 </tr>
 </tbody>
@@ -160,10 +159,15 @@
 </tr>
 <tbody>
 <tr class="altColor">
+<td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPB-java.util.Map-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap-">fromPB</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;deserializers,
+      org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap&nbsp;backingMap)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">CachedEntryQueue.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html#poll--">poll</a></span>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">CachedEntryQueue.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html#pollLast--">pollLast</a></span>()</code>&nbsp;</td>
 </tr>
@@ -189,6 +193,10 @@
 <div class="block">Put the new bucket entry into backingMap.</div>
 </td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketEntry</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry-">toPB</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;entry)</code>&nbsp;</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
@@ -208,6 +216,10 @@
 <div class="block">Attempt to add the specified entry to this queue.</div>
 </td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-java.util.Map-">toPB</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;backingMap)</code>&nbsp;</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.html
index 5dfabc7..82dac25 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.html
@@ -136,6 +136,19 @@
 </tr>
 </tbody>
 </table>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
+<caption><span>Methods in <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> with parameters of type <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.bucket.BucketCache-">toPB</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;cache)</code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">
 <caption><span>Constructors in <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> with parameters of type <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketProtoUtils.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketProtoUtils.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketProtoUtils.html
new file mode 100644
index 0000000..3673bda
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketProtoUtils.html
@@ -0,0 +1,125 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc -->
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Uses of Class org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils (Apache HBase 3.0.0-SNAPSHOT API)</title>
+<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
+<script type="text/javascript" src="../../../../../../../../script.js"></script>
+</head>
+<body>
+<script type="text/javascript"><!--
+    try {
+        if (location.href.indexOf('is-external=true') == -1) {
+            parent.document.title="Uses of Class org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils (Apache HBase 3.0.0-SNAPSHOT API)";
+        }
+    }
+    catch(err) {
+    }
+//-->
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar.top">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.top.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="../package-summary.html">Package</a></li>
+<li><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">Class</a></li>
+<li class="navBarCell1Rev">Use</li>
+<li><a href="../../../../../../../../overview-tree.html">Tree</a></li>
+<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketProtoUtils.html" target="_top">Frames</a></li>
+<li><a href="BucketProtoUtils.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="../../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip.navbar.top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<div class="header">
+<h2 title="Uses of Class org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils" class="title">Uses of Class<br>org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils</h2>
+</div>
+<div class="classUseContainer">No usage of org.apache.hadoop.hbase.io.hfile.bucket.BucketProtoUtils</div>
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar.bottom">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.bottom.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="../package-summary.html">Package</a></li>
+<li><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">Class</a></li>
+<li class="navBarCell1Rev">Use</li>
+<li><a href="../../../../../../../../overview-tree.html">Tree</a></li>
+<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketProtoUtils.html" target="_top">Frames</a></li>
+<li><a href="BucketProtoUtils.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="../../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip.navbar.bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/CacheFullException.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/CacheFullException.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/CacheFullException.html
index b85f6bd..9939e77 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/CacheFullException.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/CacheFullException.html
@@ -113,9 +113,8 @@
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
             <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
 </tr>
 </tbody>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/IOEngine.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/IOEngine.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/IOEngine.html
index 3e68961..8416c45 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/IOEngine.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/IOEngine.html
@@ -166,9 +166,8 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
             <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
 </tr>
 </tbody>


[02/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
index 1e10092..415dcdd 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>protected class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.115">HBaseClusterManager.RemoteShell</a>
+<pre>protected class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.116">HBaseClusterManager.RemoteShell</a>
 extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <div class="block">Executes commands over SSH</div>
 </li>
@@ -291,7 +291,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>hostname</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.116">hostname</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.117">hostname</a></pre>
 </li>
 </ul>
 <a name="user">
@@ -300,7 +300,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>user</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.117">user</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.118">user</a></pre>
 </li>
 </ul>
 </li>
@@ -317,7 +317,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>RemoteShell</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.119">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.120">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;execString,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a>&nbsp;dir,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;env,
@@ -330,7 +330,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>RemoteShell</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.125">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.126">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;execString,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a>&nbsp;dir,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;env)</pre>
@@ -342,7 +342,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>RemoteShell</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.130">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.131">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;execString,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a>&nbsp;dir)</pre>
 </li>
@@ -353,7 +353,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>RemoteShell</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.135">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.136">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;execString)</pre>
 </li>
 </ul>
@@ -363,7 +363,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RemoteShell</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.140">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.141">RemoteShell</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;user,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;execString)</pre>
 </li>
@@ -382,7 +382,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getExecString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.147">getExecString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.148">getExecString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code>getExecString</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.util.Shell.ShellCommandExecutor</code></dd>
@@ -395,7 +395,7 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>execute</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.156">execute</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html#line.157">execute</a>()
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html
index 0df4a6c..bc063c5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.247">HBaseClusterManager.ZookeeperShellCommandProvider</a>
+<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.248">HBaseClusterManager.ZookeeperShellCommandProvider</a>
 extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html" title="class in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider</a></pre>
 <div class="block">CommandProvider to manage the service using bin/zk* scripts.</div>
 </li>
@@ -238,7 +238,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockList">
 <li class="blockList">
 <h4>zookeeperHome</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.248">zookeeperHome</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.249">zookeeperHome</a></pre>
 </li>
 </ul>
 <a name="confDir">
@@ -247,7 +247,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>confDir</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.249">confDir</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.250">confDir</a></pre>
 </li>
 </ul>
 </li>
@@ -264,7 +264,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ZookeeperShellCommandProvider</h4>
-<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.251">ZookeeperShellCommandProvider</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.252">ZookeeperShellCommandProvider</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -286,7 +286,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockList">
 <li class="blockList">
 <h4>getCommand</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.268">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.269">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                          <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;op)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -300,7 +300,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>findPidCommand</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.273">findPidCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.ZookeeperShellCommandProvider.html#line.274">findPidCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#findPidCommand-org.apache.hadoop.hbase.ClusterManager.ServiceType-">findPidCommand</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html" title="class in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.html
index 2621e34..1f7d36f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.html
@@ -586,7 +586,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HBaseClusterManager</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.279">HBaseClusterManager</a>()</pre>
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.280">HBaseClusterManager</a>()</pre>
 </li>
 </ul>
 </li>
@@ -627,7 +627,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getCommandProvider</h4>
-<pre>protected&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html" title="class in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.282">getCommandProvider</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)
+<pre>protected&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html" title="class in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.283">getCommandProvider</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)
                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -641,7 +641,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>exec</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.util.Pair&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.298">exec</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>private&nbsp;org.apache.hadoop.hbase.util.Pair&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.300">exec</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                                                <a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>...&nbsp;cmd)
                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -660,7 +660,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>execWithRetries</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.util.Pair&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.319">execWithRetries</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>private&nbsp;org.apache.hadoop.hbase.util.Pair&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.321">execWithRetries</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                                                           <a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>...&nbsp;cmd)
                                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -678,7 +678,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>retryOrThrow</h4>
-<pre>private&nbsp;&lt;E extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a>&gt;&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.337">retryOrThrow</a>(org.apache.hadoop.hbase.util.RetryCounter&nbsp;retryCounter,
+<pre>private&nbsp;&lt;E extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a>&gt;&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.339">retryOrThrow</a>(org.apache.hadoop.hbase.util.RetryCounter&nbsp;retryCounter,
                                                 E&nbsp;ex,
                                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;cmd)
@@ -695,7 +695,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>exec</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.348">exec</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.350">exec</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                   <a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                   <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;op)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -711,7 +711,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>start</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.353">start</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.355">start</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                   int&nbsp;port)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -731,7 +731,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.358">stop</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.360">stop</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                  int&nbsp;port)
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -751,7 +751,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>restart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.363">restart</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.365">restart</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                     int&nbsp;port)
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -771,7 +771,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>signal</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.367">signal</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.369">signal</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;signal,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -787,7 +787,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>isRunning</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.372">isRunning</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.374">isRunning</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                          int&nbsp;port)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -808,7 +808,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>kill</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.379">kill</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.381">kill</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                  int&nbsp;port)
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -828,7 +828,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>suspend</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.384">suspend</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.386">suspend</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                     int&nbsp;port)
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -848,7 +848,7 @@ implements <a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>resume</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.389">resume</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.391">resume</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                    int&nbsp;port)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html
index 489e28f..cb9a240a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html
@@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.129">MiniHBaseCluster.MiniHBaseClusterRegionServer</a>
+<pre>public static class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.130">MiniHBaseCluster.MiniHBaseClusterRegionServer</a>
 extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <div class="block">Subclass so can get at protected methods (none at moment).  Also, creates
  a FileSystem instance per instantiation.  Adds a shutdown own FileSystem
@@ -298,7 +298,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdownThread</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a> <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.130">shutdownThread</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a> <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.131">shutdownThread</a></pre>
 </li>
 </ul>
 <a name="user">
@@ -307,7 +307,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>user</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.security.User <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.131">user</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.security.User <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.132">user</a></pre>
 </li>
 </ul>
 <a name="killedServers">
@@ -316,7 +316,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>killedServers</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;org.apache.hadoop.hbase.ServerName&gt; <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.137">killedServers</a></pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;org.apache.hadoop.hbase.ServerName&gt; <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.138">killedServers</a></pre>
 <div class="block">List of RegionServers killed so far. ServerName also comprises startCode of a server,
  so any restarted instances of the same server will have different ServerName and will not
  coincide with past dead ones. So there's no need to cleanup this list.</div>
@@ -336,7 +336,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MiniHBaseClusterRegionServer</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.139">MiniHBaseClusterRegionServer</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.140">MiniHBaseClusterRegionServer</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
@@ -360,7 +360,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>handleReportForDutyResponse</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.154">handleReportForDutyResponse</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;c)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.155">handleReportForDutyResponse</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;c)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
@@ -376,7 +376,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.162">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.163">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
@@ -391,7 +391,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>runRegionServer</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.182">runRegionServer</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.183">runRegionServer</a>()</pre>
 </li>
 </ul>
 <a name="kill--">
@@ -400,7 +400,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>kill</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.187">kill</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.188">kill</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code>kill</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.hbase.regionserver.HRegionServer</code></dd>
@@ -413,7 +413,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.193">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.194">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -429,7 +429,7 @@ extends org.apache.hadoop.hbase.regionserver.HRegionServer</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>abortRegionServer</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.203">abortRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason,
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.MiniHBaseClusterRegionServer.html#line.204">abortRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
index 0778813..795c788 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.212">MiniHBaseCluster.SingleFileSystemShutdownThread</a>
+<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html#line.213">MiniHBaseCluster.SingleFileSystemShutdownThread</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a></pre>
 <div class="block">Alternate shutdown hook.
  Just shuts down the passed fs, not all as default filesystem hook does.</div>
@@ -241,7 +241,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html#line.213">fs</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html#line.214">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -258,7 +258,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SingleFileSystemShutdownThread</h4>
-<pre><a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html#line.214">SingleFileSystemShutdownThread</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre><a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html#line.215">SingleFileSystemShutdownThread</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -275,7 +275,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html#line.219">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html#line.220">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>


[27/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</span>  /**<a name="l

<TRUNCATED>

[48/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 0b79690..09486bc 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -271,7 +271,7 @@
                                   
             <div class="section">
 <h2><a name="Checkstyle_Results"></a>Checkstyle Results</h2>
-<p>The following document contains the results of <a class="externalLink" href="http://checkstyle.sourceforge.net/">Checkstyle</a> 8.2 with hbase/checkstyle.xml ruleset.&#160;<a href="checkstyle.rss"><img alt="rss feed" src="images/rss.png" /></a></p></div>
+<p>The following document contains the results of <a class="externalLink" href="http://checkstyle.sourceforge.net/">Checkstyle</a> 8.11 with hbase/checkstyle.xml ruleset.&#160;<a href="checkstyle.rss"><img alt="rss feed" src="images/rss.png" /></a></p></div>
 <div class="section">
 <h2><a name="Summary"></a>Summary</h2>
 <table border="0" class="table table-striped">
@@ -281,10 +281,10 @@
 <th><img src="images/icon_warning_sml.gif" alt="" />&#160;Warnings</th>
 <th><img src="images/icon_error_sml.gif" alt="" />&#160;Errors</th></tr>
 <tr class="b">
-<td>3697</td>
+<td>3698</td>
 <td>0</td>
 <td>0</td>
-<td>15626</td></tr></table></div>
+<td>15578</td></tr></table></div>
 <div class="section">
 <h2><a name="Files"></a>Files</h2>
 <table border="0" class="table table-striped">
@@ -434,570 +434,580 @@
 <td>0</td>
 <td>6</td></tr>
 <tr class="b">
+<td><a href="#org.apache.hadoop.hbase.HBaseIOException.java">org/apache/hadoop/hbase/HBaseIOException.java</a></td>
+<td>0</td>
+<td>0</td>
+<td>2</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.HBaseTestCase.java">org/apache/hadoop/hbase/HBaseTestCase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.HBaseTestingUtility.java">org/apache/hadoop/hbase/HBaseTestingUtility.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>276</td></tr>
-<tr class="b">
+<td>275</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.HColumnDescriptor.java">org/apache/hadoop/hbase/HColumnDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>40</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.HFilePerformanceEvaluation.java">org/apache/hadoop/hbase/HFilePerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.HRegionInfo.java">org/apache/hadoop/hbase/HRegionInfo.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>59</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.HRegionLocation.java">org/apache/hadoop/hbase/HRegionLocation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.HTableDescriptor.java">org/apache/hadoop/hbase/HTableDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>38</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.HTestConst.java">org/apache/hadoop/hbase/HTestConst.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.HealthChecker.java">org/apache/hadoop/hbase/HealthChecker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.IndividualBytesFieldCell.java">org/apache/hadoop/hbase/IndividualBytesFieldCell.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestBackupRestore.java">org/apache/hadoop/hbase/IntegrationTestBackupRestore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestDDLMasterFailover.java">org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>52</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestIngest.java">org/apache/hadoop/hbase/IntegrationTestIngest.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestIngestWithACL.java">org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestIngestWithEncryption.java">org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestIngestWithMOB.java">org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestIngestWithVisibilityLabels.java">org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestManyRegions.java">org/apache/hadoop/hbase/IntegrationTestManyRegions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestMetaReplicas.java">org/apache/hadoop/hbase/IntegrationTestMetaReplicas.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestRegionReplicaPerf.java">org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.IntegrationTestingUtility.java">org/apache/hadoop/hbase/IntegrationTestingUtility.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.KeyValue.java">org/apache/hadoop/hbase/KeyValue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>117</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.KeyValueTestUtil.java">org/apache/hadoop/hbase/KeyValueTestUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.KeyValueUtil.java">org/apache/hadoop/hbase/KeyValueUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>29</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.LocalHBaseCluster.java">org/apache/hadoop/hbase/LocalHBaseCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>32</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.MetaMockingUtil.java">org/apache/hadoop/hbase/MetaMockingUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.MetaMutationAnnotation.java">org/apache/hadoop/hbase/MetaMutationAnnotation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.MetaTableAccessor.java">org/apache/hadoop/hbase/MetaTableAccessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>66</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.MiniHBaseCluster.java">org/apache/hadoop/hbase/MiniHBaseCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.MockRegionServerServices.java">org/apache/hadoop/hbase/MockRegionServerServices.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.MultithreadedTestUtil.java">org/apache/hadoop/hbase/MultithreadedTestUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.NamespaceDescriptor.java">org/apache/hadoop/hbase/NamespaceDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException.java">org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.NotServingRegionException.java">org/apache/hadoop/hbase/NotServingRegionException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.PerformanceEvaluation.java">org/apache/hadoop/hbase/PerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>39</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.PerformanceEvaluationCommons.java">org/apache/hadoop/hbase/PerformanceEvaluationCommons.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.PrivateCellUtil.java">org/apache/hadoop/hbase/PrivateCellUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>67</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.QosTestHelper.java">org/apache/hadoop/hbase/QosTestHelper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.RESTApiClusterManager.java">org/apache/hadoop/hbase/RESTApiClusterManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.RegionLoad.java">org/apache/hadoop/hbase/RegionLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.RegionLocations.java">org/apache/hadoop/hbase/RegionLocations.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.RegionStateListener.java">org/apache/hadoop/hbase/RegionStateListener.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ResourceChecker.java">org/apache/hadoop/hbase/ResourceChecker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ScanPerformanceEvaluation.java">org/apache/hadoop/hbase/ScanPerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ScheduledChore.java">org/apache/hadoop/hbase/ScheduledChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.Server.java">org/apache/hadoop/hbase/Server.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ServerLoad.java">org/apache/hadoop/hbase/ServerLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ServerName.java">org/apache/hadoop/hbase/ServerName.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.SplitLogCounters.java">org/apache/hadoop/hbase/SplitLogCounters.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.SplitLogTask.java">org/apache/hadoop/hbase/SplitLogTask.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.StripeCompactionsPerformanceEvaluation.java">org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TableDescriptors.java">org/apache/hadoop/hbase/TableDescriptors.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TableInfoMissingException.java">org/apache/hadoop/hbase/TableInfoMissingException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TableName.java">org/apache/hadoop/hbase/TableName.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TableNotDisabledException.java">org/apache/hadoop/hbase/TableNotDisabledException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TableNotEnabledException.java">org/apache/hadoop/hbase/TableNotEnabledException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TableNotFoundException.java">org/apache/hadoop/hbase/TableNotFoundException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TagType.java">org/apache/hadoop/hbase/TagType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestCellUtil.java">org/apache/hadoop/hbase/TestCellUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestCheckTestClasses.java">org/apache/hadoop/hbase/TestCheckTestClasses.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestClassFinder.java">org/apache/hadoop/hbase/TestClassFinder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestClientClusterStatus.java">org/apache/hadoop/hbase/TestClientClusterStatus.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestClientOperationTimeout.java">org/apache/hadoop/hbase/TestClientOperationTimeout.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestClusterPortAssignment.java">org/apache/hadoop/hbase/TestClusterPortAssignment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestFSTableDescriptorForceCreation.java">org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestGlobalMemStoreSize.java">org/apache/hadoop/hbase/TestGlobalMemStoreSize.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestHBaseConfiguration.java">org/apache/hadoop/hbase/TestHBaseConfiguration.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestHBaseTestingUtility.java">org/apache/hadoop/hbase/TestHBaseTestingUtility.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestHColumnDescriptor.java">org/apache/hadoop/hbase/TestHColumnDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestHColumnDescriptorDefaultVersions.java">org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestHTableDescriptor.java">org/apache/hadoop/hbase/TestHTableDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestIOFencing.java">org/apache/hadoop/hbase/TestIOFencing.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestInfoServers.java">org/apache/hadoop/hbase/TestInfoServers.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestJMXConnectorServer.java">org/apache/hadoop/hbase/TestJMXConnectorServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestKeyValue.java">org/apache/hadoop/hbase/TestKeyValue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestLocalHBaseCluster.java">org/apache/hadoop/hbase/TestLocalHBaseCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestMetaTableAccessor.java">org/apache/hadoop/hbase/TestMetaTableAccessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestMetaTableAccessorNoCluster.java">org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestMetaTableLocator.java">org/apache/hadoop/hbase/TestMetaTableLocator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>41</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestMovedRegionsCleaner.java">org/apache/hadoop/hbase/TestMovedRegionsCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestMultiVersions.java">org/apache/hadoop/hbase/TestMultiVersions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestNamespace.java">org/apache/hadoop/hbase/TestNamespace.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestNodeHealthCheckChore.java">org/apache/hadoop/hbase/TestNodeHealthCheckChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestPartialResultsFromClientSide.java">org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>24</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestPerformanceEvaluation.java">org/apache/hadoop/hbase/TestPerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestRegionRebalancing.java">org/apache/hadoop/hbase/TestRegionRebalancing.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestSerialization.java">org/apache/hadoop/hbase/TestSerialization.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestServerSideScanMetricsFromClientSide.java">org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>15</td></tr>
-<tr class="b">
+<td>10</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestStochasticBalancerJmxMetrics.java">org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestTimeout.java">org/apache/hadoop/hbase/TestTimeout.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TimestampTestBase.java">org/apache/hadoop/hbase/TimestampTestBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.UnknownRegionException.java">org/apache/hadoop/hbase/UnknownRegionException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.Waiter.java">org/apache/hadoop/hbase/Waiter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ZKNamespaceManager.java">org/apache/hadoop/hbase/ZKNamespaceManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ZNodeClearer.java">org/apache/hadoop/hbase/ZNodeClearer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.BackupDriver.java">org/apache/hadoop/hbase/backup/BackupDriver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.backup.FailedArchiveException.java">org/apache/hadoop/hbase/backup/FailedArchiveException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.HFileArchiver.java">org/apache/hadoop/hbase/backup/HFileArchiver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.backup.TestBackupMerge.java">org/apache/hadoop/hbase/backup/TestBackupMerge.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.TestHFileArchiving.java">org/apache/hadoop/hbase/backup/TestHFileArchiving.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.backup.example.HFileArchiveManager.java">org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.example.LongTermArchivingHFileCleaner.java">org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.backup.example.TableHFileArchiveTracker.java">org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.example.TestZooKeeperTableArchiveClient.java">org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.backup.example.ZKTableArchiveClient.java">org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.impl.BackupCommands.java">org/apache/hadoop/hbase/backup/impl/BackupCommands.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.backup.impl.BackupManager.java">org/apache/hadoop/hbase/backup/impl/BackupManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.backup.impl.BackupSystemTable.java">org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.chaos.actions.Action.java">org/apache/hadoop/hbase/chaos/actions/Action.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.chaos.actions.ChangeCompressionAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeCompressionAction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.chaos.actions.ChangeEncodingAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeEncodingAction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.chaos.actions.ChangeSplitPolicyAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeSplitPolicyAction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.chaos.actions.ChangeVersionsAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeVersionsAction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
+<tr class="b">
+<td><a href="#org.apache.hadoop.hbase.chaos.actions.FlushRandomRegionOfTableAction.java">org/apache/hadoop/hbase/chaos/actions/FlushRandomRegionOfTableAction.java</a></td>
+<td>0</td>
+<td>0</td>
+<td>1</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.chaos.actions.RestartRandomDataNodeAction.java">org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java</a></td>
 <td>0</td>
@@ -1472,7 +1482,7 @@
 <td><a href="#org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.java">org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>89</td></tr>
+<td>87</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.client.RegionAdminServiceCallable.java">org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java</a></td>
 <td>0</td>
@@ -1637,7 +1647,7 @@
 <td><a href="#org.apache.hadoop.hbase.client.SimpleRequestController.java">org/apache/hadoop/hbase/client/SimpleRequestController.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>10</td></tr>
+<td>9</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.client.SimpleScanResultConsumer.java">org/apache/hadoop/hbase/client/SimpleScanResultConsumer.java</a></td>
 <td>0</td>
@@ -1697,7 +1707,7 @@
 <td><a href="#org.apache.hadoop.hbase.client.TestAsyncProcess.java">org/apache/hadoop/hbase/client/TestAsyncProcess.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>18</td></tr>
+<td>17</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.client.TestAsyncQuotaAdminApi.java">org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java</a></td>
 <td>0</td>
@@ -2097,7 +2107,7 @@
 <td><a href="#org.apache.hadoop.hbase.client.locking.EntityLock.java">org/apache/hadoop/hbase/client/locking/EntityLock.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>5</td></tr>
+<td>4</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.client.locking.TestEntityLocks.java">org/apache/hadoop/hbase/client/locking/TestEntityLocks.java</a></td>
 <td>0</td>
@@ -2374,1227 +2384,1227 @@
 <td>0</td>
 <td>1</td></tr>
 <tr class="b">
+<td><a href="#org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint.java">org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorEndpoint.java</a></td>
+<td>0</td>
+<td>0</td>
+<td>4</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestCoprocessorInterface.java">org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestCoprocessorMetrics.java">org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestCoprocessorStop.java">org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestCoreMasterCoprocessor.java">org/apache/hadoop/hbase/coprocessor/TestCoreMasterCoprocessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestCoreRegionCoprocessor.java">org/apache/hadoop/hbase/coprocessor/TestCoreRegionCoprocessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestCoreRegionServerCoprocessor.java">org/apache/hadoop/hbase/coprocessor/TestCoreRegionServerCoprocessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestMasterCoprocessorExceptionWithAbort.java">org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestMasterCoprocessorExceptionWithRemove.java">org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestMasterObserver.java">org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestOpenTableInCoprocessor.java">org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>2</td></tr>
-<tr class="b">
+<td>1</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestRegionObserverBypass.java">org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestRegionObserverForAddingMutationsFromCoprocessors.java">org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestRegionObserverInterface.java">org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>26</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestRegionObserverScannerOpenHook.java">org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.TestRegionObserverStacking.java">org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.WALCoprocessor.java">org/apache/hadoop/hbase/coprocessor/WALCoprocessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment.java">org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.WALObserver.java">org/apache/hadoop/hbase/coprocessor/WALObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.BulkDeleteEndpoint.java">org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.ExampleMasterObserverWithMetrics.java">org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.RefreshHFilesEndpoint.java">org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.TestRefreshHFilesEndpoint.java">org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.package-info.java">org/apache/hadoop/hbase/coprocessor/package-info.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.ForeignException.java">org/apache/hadoop/hbase/errorhandling/ForeignException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher.java">org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.TestForeignExceptionDispatcher.java">org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.TestForeignExceptionSerialization.java">org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector.java">org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil.java">org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.exceptions.FailedSanityCheckException.java">org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.exceptions.MergeRegionException.java">org/apache/hadoop/hbase/exceptions/MergeRegionException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.exceptions.TestClientExceptionsUtil.java">org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.executor.EventHandler.java">org/apache/hadoop/hbase/executor/EventHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.executor.EventType.java">org/apache/hadoop/hbase/executor/EventType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>38</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.executor.ExecutorService.java">org/apache/hadoop/hbase/executor/ExecutorService.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.executor.ExecutorType.java">org/apache/hadoop/hbase/executor/ExecutorType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>18</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper.java">org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.favored.FavoredNodeLoadBalancer.java">org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.favored.FavoredNodesPlan.java">org/apache/hadoop/hbase/favored/FavoredNodesPlan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.favored.FavoredNodesPromoter.java">org/apache/hadoop/hbase/favored/FavoredNodesPromoter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.favored.StartcodeAgnosticServerName.java">org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.favored.TestFavoredNodeAssignmentHelper.java">org/apache/hadoop/hbase/favored/TestFavoredNodeAssignmentHelper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.BigDecimalComparator.java">org/apache/hadoop/hbase/filter/BigDecimalComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.BinaryComparator.java">org/apache/hadoop/hbase/filter/BinaryComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.BinaryPrefixComparator.java">org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.BitComparator.java">org/apache/hadoop/hbase/filter/BitComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ByteArrayComparable.java">org/apache/hadoop/hbase/filter/ByteArrayComparable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnCountGetFilter.java">org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnPaginationFilter.java">org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnPrefixFilter.java">org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnRangeFilter.java">org/apache/hadoop/hbase/filter/ColumnRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.CompareFilter.java">org/apache/hadoop/hbase/filter/CompareFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>24</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.DependentColumnFilter.java">org/apache/hadoop/hbase/filter/DependentColumnFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FamilyFilter.java">org/apache/hadoop/hbase/filter/FamilyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>12</td></tr>
-<tr class="b">
+<td>13</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.Filter.java">org/apache/hadoop/hbase/filter/Filter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterAllFilter.java">org/apache/hadoop/hbase/filter/FilterAllFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterBase.java">org/apache/hadoop/hbase/filter/FilterBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterList.java">org/apache/hadoop/hbase/filter/FilterList.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterListBase.java">org/apache/hadoop/hbase/filter/FilterListBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterListWithAND.java">org/apache/hadoop/hbase/filter/FilterListWithAND.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterListWithOR.java">org/apache/hadoop/hbase/filter/FilterListWithOR.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>99</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterTestingCluster.java">org/apache/hadoop/hbase/filter/FilterTestingCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterWrapper.java">org/apache/hadoop/hbase/filter/FilterWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.java">org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter.java">org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FuzzyRowFilter.java">org/apache/hadoop/hbase/filter/FuzzyRowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>20</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.InclusiveStopFilter.java">org/apache/hadoop/hbase/filter/InclusiveStopFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.KeyOnlyFilter.java">org/apache/hadoop/hbase/filter/KeyOnlyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.LongComparator.java">org/apache/hadoop/hbase/filter/LongComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.MultiRowRangeFilter.java">org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter.java">org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.NullComparator.java">org/apache/hadoop/hbase/filter/NullComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.PageFilter.java">org/apache/hadoop/hbase/filter/PageFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.ParseConstants.java">org/apache/hadoop/hbase/filter/ParseConstants.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ParseFilter.java">org/apache/hadoop/hbase/filter/ParseFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>47</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.PrefixFilter.java">org/apache/hadoop/hbase/filter/PrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.QualifierFilter.java">org/apache/hadoop/hbase/filter/QualifierFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.RandomRowFilter.java">org/apache/hadoop/hbase/filter/RandomRowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.RegexStringComparator.java">org/apache/hadoop/hbase/filter/RegexStringComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.RowFilter.java">org/apache/hadoop/hbase/filter/RowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter.java">org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>23</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.SingleColumnValueFilter.java">org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>23</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.SkipFilter.java">org/apache/hadoop/hbase/filter/SkipFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.SubstringComparator.java">org/apache/hadoop/hbase/filter/SubstringComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestColumnPaginationFilter.java">org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>40</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestColumnPrefixFilter.java">org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestColumnRangeFilter.java">org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestComparators.java">org/apache/hadoop/hbase/filter/TestComparators.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestDependentColumnFilter.java">org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilter.java">org/apache/hadoop/hbase/filter/TestFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>352</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilterList.java">org/apache/hadoop/hbase/filter/TestFilterList.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>19</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilterListOnMini.java">org/apache/hadoop/hbase/filter/TestFilterListOnMini.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilterListOrOperatorWithBlkCnt.java">org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilterSerialization.java">org/apache/hadoop/hbase/filter/TestFilterSerialization.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilterWithScanLimits.java">org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFilterWrapper.java">org/apache/hadoop/hbase/filter/TestFilterWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFirstKeyValueMatchingQualifiersFilter.java">org/apache/hadoop/hbase/filter/TestFirstKeyValueMatchingQualifiersFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter.java">org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd.java">org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestInclusiveStopFilter.java">org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestLongComparator.java">org/apache/hadoop/hbase/filter/TestLongComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestMultiRowRangeFilter.java">org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestMultipleColumnPrefixFilter.java">org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestNullComparator.java">org/apache/hadoop/hbase/filter/TestNullComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestPageFilter.java">org/apache/hadoop/hbase/filter/TestPageFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestPrefixFilter.java">org/apache/hadoop/hbase/filter/TestPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestRandomRowFilter.java">org/apache/hadoop/hbase/filter/TestRandomRowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestRegexComparator.java">org/apache/hadoop/hbase/filter/TestRegexComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestScanRowPrefix.java">org/apache/hadoop/hbase/filter/TestScanRowPrefix.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TestSingleColumnValueExcludeFilter.java">org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.TestSingleColumnValueFilter.java">org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TimestampsFilter.java">org/apache/hadoop/hbase/filter/TimestampsFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ValueFilter.java">org/apache/hadoop/hbase/filter/ValueFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.WhileMatchFilter.java">org/apache/hadoop/hbase/filter/WhileMatchFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.fs.HFileSystem.java">org/apache/hadoop/hbase/fs/HFileSystem.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.fs.TestBlockReorderBlockLocation.java">org/apache/hadoop/hbase/fs/TestBlockReorderBlockLocation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.fs.TestBlockReorderMultiBlocks.java">org/apache/hadoop/hbase/fs/TestBlockReorderMultiBlocks.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.ClickjackingPreventionFilter.java">org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.HtmlQuoting.java">org/apache/hadoop/hbase/http/HtmlQuoting.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.HttpConfig.java">org/apache/hadoop/hbase/http/HttpConfig.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.HttpRequestLogAppender.java">org/apache/hadoop/hbase/http/HttpRequestLogAppender.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.HttpServer.java">org/apache/hadoop/hbase/http/HttpServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>38</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.HttpServerFunctionalTest.java">org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.InfoServer.java">org/apache/hadoop/hbase/http/InfoServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.NoCacheFilter.java">org/apache/hadoop/hbase/http/NoCacheFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.TestGlobalFilter.java">org/apache/hadoop/hbase/http/TestGlobalFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.TestHtmlQuoting.java">org/apache/hadoop/hbase/http/TestHtmlQuoting.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.TestHttpServer.java">org/apache/hadoop/hbase/http/TestHttpServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.TestPathFilter.java">org/apache/hadoop/hbase/http/TestPathFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.TestServletFilter.java">org/apache/hadoop/hbase/http/TestServletFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.TestSpnegoHttpServer.java">org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>18</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.conf.ConfServlet.java">org/apache/hadoop/hbase/http/conf/ConfServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.jmx.JMXJsonServlet.java">org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.log.LogLevel.java">org/apache/hadoop/hbase/http/log/LogLevel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.log.TestLogLevel.java">org/apache/hadoop/hbase/http/log/TestLogLevel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.resource.JerseyResource.java">org/apache/hadoop/hbase/http/resource/JerseyResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil.java">org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.ByteBuffInputStream.java">org/apache/hadoop/hbase/io/ByteBuffInputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.ByteBufferOutputStream.java">org/apache/hadoop/hbase/io/ByteBufferOutputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.CellOutputStream.java">org/apache/hadoop/hbase/io/CellOutputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.java">org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.FileLink.java">org/apache/hadoop/hbase/io/FileLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.HFileLink.java">org/apache/hadoop/hbase/io/HFileLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.HalfStoreFileReader.java">org/apache/hadoop/hbase/io/HalfStoreFileReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.HeapSize.java">org/apache/hadoop/hbase/io/HeapSize.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.ImmutableBytesWritable.java">org/apache/hadoop/hbase/io/ImmutableBytesWritable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.Reference.java">org/apache/hadoop/hbase/io/Reference.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>19</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.TagCompressionContext.java">org/apache/hadoop/hbase/io/TagCompressionContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.TestByteBufferOutputStream.java">org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.TestFileLink.java">org/apache/hadoop/hbase/io/TestFileLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.TestHFileLink.java">org/apache/hadoop/hbase/io/TestHFileLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.TestHalfStoreFileReader.java">org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.TestHeapSize.java">org/apache/hadoop/hbase/io/TestHeapSize.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.TestImmutableBytesWritable.java">org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.TestMetricsIO.java">org/apache/hadoop/hbase/io/TestMetricsIO.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.TimeRange.java">org/apache/hadoop/hbase/io/TimeRange.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.WALLink.java">org/apache/hadoop/hbase/io/WALLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutput.java">org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.java">org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.compress.Compression.java">org/apache/hadoop/hbase/io/compress/Compression.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Cipher.java">org/apache/hadoop/hbase/io/crypto/Cipher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Decryptor.java">org/apache/hadoop/hbase/io/crypto/Decryptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Encryption.java">org/apache/hadoop/hbase/io/crypto/Encryption.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>53</td></tr>
-<tr class="a">
+<td>52</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Encryptor.java">org/apache/hadoop/hbase/io/crypto/Encryptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.KeyProvider.java">org/apache/hadoop/hbase/io/crypto/KeyProvider.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.TestEncryption.java">org/apache/hadoop/hbase/io/crypto/TestEncryption.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.aes.CryptoAES.java">org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.BufferedDataBlockEncoder.java">org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.CopyKeyDataBlockEncoder.java">org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.java">org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.DataBlockEncoding.java">org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.EncodedDataBlock.java">org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext.java">org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext.java">org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext.java">org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.RowIndexCodecV1.java">org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.TestDataBlockEncoders.java">org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.TestEncodedSeekers.java">org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream.java">org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCache.java">org/apache/hadoop/hbase/io/hfile/BlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCacheKey.java">org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.java">org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>28</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCachesIterator.java">org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockType.java">org/apache/hadoop/hbase/io/hfile/BlockType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheConfig.java">org/apache/hadoop/hbase/io/hfile/CacheConfig.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>8</td></tr>
-<tr class="a">
+<td>10</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheStats.java">org/apache/hadoop/hbase/io/hfile/CacheStats.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheTestUtils.java">org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.Cacheable.java">org/apache/hadoop/hbase/io/hfile/Cacheable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheableDeserializer.java">org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager.java">org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>4</td></tr>
-<tr class="b">
+<td>2</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.ChecksumUtil.java">org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.java">org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CompoundBloomFilter.java">org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterBase.java">org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter.java">org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CorruptHFileException.java">org/apache/hadoop/hbase/io/hfile/CorruptHFileException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.java">org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFile.java">org/apache/hadoop/hbase/io/hfile/HFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>36</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileBlock.java">org/apache/hadoop/hbase/io/hfile/HFileBlock.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.java">org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>39</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileContext.java">org/apache/hadoop/hbase/io/hfile/HFileContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder.java">org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl.java">org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter.java">org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileReaderImpl.java">org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>52</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileScanner.java">org/apache/hadoop/hbase/io/hfile/HFileScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>23</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileUtil.java">org/apache/hadoop/hbase/io/hfile/HFileUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileWriterImpl.java">org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>26</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.KVGenerator.java">org/apache/hadoop/hbase/io/hfile/KVGenerator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.LruBlockCache.java">org/apache/hadoop/hbase/io/hfile/LruBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>13</td></tr>
-<tr class="b">
+<td>14</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.LruCachedBlock.java">org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.LruCachedBlockQueue.java">org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.NanoTimer.java">org/apache/hadoop/hbase/io/hfile/NanoTimer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder.java">org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.PrefetchExecutor.java">org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.RandomKeyValueUtil.java">org/apache/hadoop/hbase/io/hfile/RandomKeyValueUtil.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>14</td></tr>
-<tr class="b">
+<td>13</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestBlockCacheReporting.java">org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestCacheConfig.java">org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.java">org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestCachedBlockQueue.java">org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>19</td></tr>
-<tr class="b">
+<td>1</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestChecksum.java">org/apache/hadoop/hbase/io/hfile/TestChecksum.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestFixedFileTrailer.java">org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestForceCacheImportantBlocks.java">org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFile.java">org/apache/hadoop/hbase/io/hfile/TestHFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>27</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileBlock.java">org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileBlockIndex.java">org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>41</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileBlockPositionalRead.java">org/apache/hadoop/hbase/io/hfile/TestHFileBlockPositionalRead.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileDataBlockEncoder.java">org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileWriterV3.java">org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestLruBlockCache.java">org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestPrefetch.java">org/apache/hadoop/hbase/io/hfile/TestPrefetch.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestScannerFromBucketCache.java">org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestScannerSelectionUsingKeyRange.java">org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.java">org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>33</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.java">org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>38</td></tr>
-<tr class="a">
+<td>30</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.BucketCacheStats.java">org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.ByteBufferIOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.CachedEntryQueue.java">org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.FileIOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.FileMmapEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.IOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.TestBucketCache.java">org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.TestBucketWriterThread.java">org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
-<td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.TestFileIOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java</a></td>
-<td>0</td>
-<td>0</td>
-<td>1</td></tr>
 <tr class="b">
-<td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap.java">org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.java</a></td>
+<td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.TestFileIOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
@@ -3872,7 +3882,7 @@
 <td><a href="#org.apache.hadoop.hbase.ipc.SimpleRpcScheduler.java">org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>9</td></tr>
+<td>10</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.SimpleRpcServer.java">org/apache/hadoop/hbase/ipc/SimpleRpcServer.java</a></td>
 <td>0</td>
@@ -4047,7 +4057,7 @@
 <td><a href="#org.apache.hadoop.hbase.mapred.TestTableInputFormat.java">org/apache/hadoop/hbase/mapred/TestTableInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>22</td></tr>
+<td>21</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.TestTableMapReduce.java">org/apache/hadoop/hbase/mapred/TestTableMapReduce.java</a></td>
 <td>0</td>
@@ -4447,7 +4457,7 @@
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TestTableInputFormat.java">org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>29</td></tr>
+<td>28</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TestTableInputFormatBase.java">org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java</a></td>
 <td>0</td>
@@ -4797,7 +4807,7 @@
 <td><a href="#org.apache.hadoop.hbase.master.TestRegionPlacement.java">org/apache/hadoop/hbase/master/TestRegionPlacement.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>20</td></tr>
+<td>19</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.TestRegionPlacement2.java">org/apache/hadoop/hbase/master/TestRegionPlacement2.java</a></td>
 <td>0</td>
@@ -4917,7 +4927,7 @@
 <td><a href="#org.apache.hadoop.hbase.master.balancer.BalancerTestBase.java">org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>75</td></tr>
+<td>74</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.java">org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java</a></td>
 <td>0</td>
@@ -6572,7 +6582,7 @@
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost.java">org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>7</td></tr>
+<td>8</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionServerServices.java">org/apache/hadoop/hbase/regionserver/RegionServerServices.java</a></td>
 <td>0</td>
@@ -6932,12 +6942,12 @@
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHStore.java">org/apache/hadoop/hbase/regionserver/TestHStore.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>41</td></tr>
+<td>40</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHStoreFile.java">org/apache/hadoop/hbase/regionserver/TestHStoreFile.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>23</td></tr>
+<td>22</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestJoinedScanners.java">org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java</a></td>
 <td>0</td>
@@ -7847,7 +7857,7 @@
 <td><a href="#org.apache.hadoop.hbase.rest.HBaseRESTTestingUtility.java">org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>14</td></tr>
+<td>13</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.MetricsREST.java">org/apache/hadoop/hbase/rest/MetricsREST.java</a></td>
 <td>0</td>
@@ -7917,7 +7927,7 @@
 <td><a href="#org.apache.hadoop.hbase.rest.RowResource.java">org/apache/hadoop/hbase/rest/RowResource.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>60</td></tr>
+<td>52</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.RowResourceBase.java">org/apache/hadoop/hbase/rest/RowResourceBase.java</a></td>
 <td>0</td>
@@ -7947,7 +7957,7 @@
 <td><a href="#org.apache.hadoop.hbase.rest.SchemaResource.java">org/apache/hadoop/hbase/rest/SchemaResource.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>10</td></tr>
+<td>9</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.StorageClusterStatusResource.java">org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java</a></td>
 <td>0</td>
@@ -8742,7 +8752,7 @@
 <td><a href="#org.apache.hadoop.hbase.snapshot.TablePartiallyOpenException.java">org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>3</td></tr>
+<td>4</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.snapshot.TestExportSnapshot.java">org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java</a></td>
 <td>0</td>
@@ -8804,440 +8814,440 @@
 <td>0</td>
 <td>14</td></tr>
 <tr class="b">
-<td><a href="#org.apache.hadoop.hbase.test.IntegrationTestZKAndFSPermissions.java">org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java</a></td>
-<td>0</td>
-<td>0</td>
-<td>1</td></tr>
-<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.thrift.DemoClient.java">org/apache/hadoop/hbase/thrift/DemoClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>250</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.thrift.HttpDoAsClient.java">org/apache/hadoop/hbase/thrift/HttpDoAsClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.thrift2.DemoClient.java">org/apache/hadoop/hbase/thrift2/DemoClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.thrift2.ThriftServer.java">org/apache/hadoop/hbase/thrift2/ThriftServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.tool.Canary.java">org/apache/hadoop/hbase/tool/Canary.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>44</td></tr>
-<tr class="b">
+<td>43</td></tr>
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.java">org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.tool.MapreduceTestingShim.java">org/apache/hadoop/hbase/tool/MapreduceTestingShim.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.tool.TestCanaryTool.java">org/apache/hadoop/hbase/tool/TestCanaryTool.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.tool.TestLoadIncrementalHFiles.java">org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.tool.TestLoadIncrementalHFilesSplitRecovery.java">org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.trace.IntegrationTestSendTraceRequests.java">org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.trace.SpanReceiverHost.java">org/apache/hadoop/hbase/trace/SpanReceiverHost.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.trace.TraceTree.java">org/apache/hadoop/hbase/trace/TraceTree.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.trace.TraceUtil.java">org/apache/hadoop/hbase/trace/TraceUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.types.TestCopyOnWriteMaps.java">org/apache/hadoop/hbase/types/TestCopyOnWriteMaps.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.types.TestOrderedBlob.java">org/apache/hadoop/hbase/types/TestOrderedBlob.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.types.TestOrderedString.java">org/apache/hadoop/hbase/types/TestOrderedString.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.types.TestStruct.java">org/apache/hadoop/hbase/types/TestStruct.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.types.TestTerminatedWrapper.java">org/apache/hadoop/hbase/types/TestTerminatedWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.types.TestUnion2.java">org/apache/hadoop/hbase/types/TestUnion2.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.util.AbstractByteRange.java">org/apache/hadoop/hbase/util/AbstractByteRange.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.util.AbstractFileStatusFilter.java">org/apache/hadoop/hbase/util/AbstractFileStatusFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.util.AbstractHBaseTool.java">org/apache/hadoop/hbase/util/AbstractHBaseTool.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.util.AbstractPositionedByteRange.java">org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.util.Addressing.java">org/apache/hadoop/hbase/util/Addressing.java</a></td>
 <td>0</td>
 <t

<TRUNCATED>

[18/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlock;<a name="line.522"></a>
+<span class="sourceLin

<TRUNCATED>

[31/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
index e50f682..7d5287a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
@@ -33,37 +33,55 @@
 <span class="sourceLineNo">025</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.25"></a>
 <span class="sourceLineNo">026</span><a name="line.26"></a>
 <span class="sourceLineNo">027</span>/**<a name="line.27"></a>
-<span class="sourceLineNo">028</span> * This class is used to manage the identifiers for<a name="line.28"></a>
-<span class="sourceLineNo">029</span> * {@link CacheableDeserializer}<a name="line.29"></a>
-<span class="sourceLineNo">030</span> */<a name="line.30"></a>
-<span class="sourceLineNo">031</span>@InterfaceAudience.Private<a name="line.31"></a>
-<span class="sourceLineNo">032</span>public class CacheableDeserializerIdManager {<a name="line.32"></a>
-<span class="sourceLineNo">033</span>  private static final Map&lt;Integer, CacheableDeserializer&lt;Cacheable&gt;&gt; registeredDeserializers = new HashMap&lt;&gt;();<a name="line.33"></a>
-<span class="sourceLineNo">034</span>  private static final AtomicInteger identifier = new AtomicInteger(0);<a name="line.34"></a>
-<span class="sourceLineNo">035</span><a name="line.35"></a>
-<span class="sourceLineNo">036</span>  /**<a name="line.36"></a>
-<span class="sourceLineNo">037</span>   * Register the given cacheable deserializer and generate an unique identifier<a name="line.37"></a>
-<span class="sourceLineNo">038</span>   * id for it<a name="line.38"></a>
-<span class="sourceLineNo">039</span>   * @param cd<a name="line.39"></a>
-<span class="sourceLineNo">040</span>   * @return the identifier of given cacheable deserializer<a name="line.40"></a>
-<span class="sourceLineNo">041</span>   */<a name="line.41"></a>
-<span class="sourceLineNo">042</span>  public static int registerDeserializer(CacheableDeserializer&lt;Cacheable&gt; cd) {<a name="line.42"></a>
-<span class="sourceLineNo">043</span>    int idx = identifier.incrementAndGet();<a name="line.43"></a>
-<span class="sourceLineNo">044</span>    synchronized (registeredDeserializers) {<a name="line.44"></a>
-<span class="sourceLineNo">045</span>      registeredDeserializers.put(idx, cd);<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    }<a name="line.46"></a>
-<span class="sourceLineNo">047</span>    return idx;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  }<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>  /**<a name="line.50"></a>
-<span class="sourceLineNo">051</span>   * Get the cacheable deserializer as the given identifier Id<a name="line.51"></a>
-<span class="sourceLineNo">052</span>   * @param id<a name="line.52"></a>
-<span class="sourceLineNo">053</span>   * @return CacheableDeserializer<a name="line.53"></a>
-<span class="sourceLineNo">054</span>   */<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public static CacheableDeserializer&lt;Cacheable&gt; getDeserializer(int id) {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    return registeredDeserializers.get(id);<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
-<span class="sourceLineNo">058</span>}<a name="line.58"></a>
+<span class="sourceLineNo">028</span> * This class is used to manage the identifiers for {@link CacheableDeserializer}.<a name="line.28"></a>
+<span class="sourceLineNo">029</span> * All deserializers are registered with this Manager via the<a name="line.29"></a>
+<span class="sourceLineNo">030</span> * {@link #registerDeserializer(CacheableDeserializer)}}. On registration, we return an<a name="line.30"></a>
+<span class="sourceLineNo">031</span> * int *identifier* for this deserializer. The int identifier is passed to<a name="line.31"></a>
+<span class="sourceLineNo">032</span> * {@link #getDeserializer(int)}} to obtain the registered deserializer instance.<a name="line.32"></a>
+<span class="sourceLineNo">033</span> */<a name="line.33"></a>
+<span class="sourceLineNo">034</span>@InterfaceAudience.Private<a name="line.34"></a>
+<span class="sourceLineNo">035</span>public class CacheableDeserializerIdManager {<a name="line.35"></a>
+<span class="sourceLineNo">036</span>  private static final Map&lt;Integer, CacheableDeserializer&lt;Cacheable&gt;&gt; registeredDeserializers = new HashMap&lt;&gt;();<a name="line.36"></a>
+<span class="sourceLineNo">037</span>  private static final AtomicInteger identifier = new AtomicInteger(0);<a name="line.37"></a>
+<span class="sourceLineNo">038</span><a name="line.38"></a>
+<span class="sourceLineNo">039</span>  /**<a name="line.39"></a>
+<span class="sourceLineNo">040</span>   * Register the given {@link Cacheable} -- usually an hfileblock instance, these implement<a name="line.40"></a>
+<span class="sourceLineNo">041</span>   * the Cacheable Interface -- deserializer and generate an unique identifier id for it and return<a name="line.41"></a>
+<span class="sourceLineNo">042</span>   * this as our result.<a name="line.42"></a>
+<span class="sourceLineNo">043</span>   * @return the identifier of given cacheable deserializer<a name="line.43"></a>
+<span class="sourceLineNo">044</span>   * @see #getDeserializer(int)<a name="line.44"></a>
+<span class="sourceLineNo">045</span>   */<a name="line.45"></a>
+<span class="sourceLineNo">046</span>  public static int registerDeserializer(CacheableDeserializer&lt;Cacheable&gt; cd) {<a name="line.46"></a>
+<span class="sourceLineNo">047</span>    int idx = identifier.incrementAndGet();<a name="line.47"></a>
+<span class="sourceLineNo">048</span>    synchronized (registeredDeserializers) {<a name="line.48"></a>
+<span class="sourceLineNo">049</span>      registeredDeserializers.put(idx, cd);<a name="line.49"></a>
+<span class="sourceLineNo">050</span>    }<a name="line.50"></a>
+<span class="sourceLineNo">051</span>    return idx;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  }<a name="line.52"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
+<span class="sourceLineNo">054</span>  /**<a name="line.54"></a>
+<span class="sourceLineNo">055</span>   * Get the cacheable deserializer registered at the given identifier Id.<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   * @see #registerDeserializer(CacheableDeserializer)<a name="line.56"></a>
+<span class="sourceLineNo">057</span>   */<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static CacheableDeserializer&lt;Cacheable&gt; getDeserializer(int id) {<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    return registeredDeserializers.get(id);<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  }<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>  /**<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * Snapshot a map of the current identifiers to class names for reconstruction on reading out<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * of a file.<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  public static Map&lt;Integer,String&gt; save() {<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    Map&lt;Integer, String&gt; snapshot = new HashMap&lt;&gt;();<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    synchronized (registeredDeserializers) {<a name="line.68"></a>
+<span class="sourceLineNo">069</span>      for (Map.Entry&lt;Integer, CacheableDeserializer&lt;Cacheable&gt;&gt; entry :<a name="line.69"></a>
+<span class="sourceLineNo">070</span>          registeredDeserializers.entrySet()) {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>        snapshot.put(entry.getKey(), entry.getValue().getClass().getName());<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      }<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    }<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    return snapshot;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>}<a name="line.76"></a>
 
 
 


[15/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlock;<a name="line.522"></a>
+<span class="sourceLineNo">

<TRUNCATED>

[23/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</

<TRUNCATED>

[21/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</span>  /**<a name="line.503"></a>
+<span class="sourceLineNo">504

<TRUNCATED>

[20/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlock;<a name="line.522"></a>
+<span class="sourceLineNo">523</

<TRUNCATED>

[04/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/allclasses-frame.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/allclasses-frame.html b/testdevapidocs/allclasses-frame.html
index 1b24e50..b9a38d0 100644
--- a/testdevapidocs/allclasses-frame.html
+++ b/testdevapidocs/allclasses-frame.html
@@ -490,6 +490,7 @@
 <li><a href="org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase" target="classFrame">RESTApiClusterManager.Service</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartActionBaseAction.html" title="class in org.apache.hadoop.hbase.chaos.actions" target="classFrame">RestartActionBaseAction</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveMasterAction.html" title="class in org.apache.hadoop.hbase.chaos.actions" target="classFrame">RestartActiveMasterAction</a></li>
+<li><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions" target="classFrame">RestartActiveNameNodeAction</a></li>
 <li><a href="org/apache/hadoop/hbase/util/RestartMetaTest.html" title="class in org.apache.hadoop.hbase.util" target="classFrame">RestartMetaTest</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions" target="classFrame">RestartRandomDataNodeAction</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.html" title="class in org.apache.hadoop.hbase.chaos.actions" target="classFrame">RestartRandomRsAction</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/allclasses-noframe.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/allclasses-noframe.html b/testdevapidocs/allclasses-noframe.html
index 7b96d5d..8092c7f 100644
--- a/testdevapidocs/allclasses-noframe.html
+++ b/testdevapidocs/allclasses-noframe.html
@@ -490,6 +490,7 @@
 <li><a href="org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase">RESTApiClusterManager.Service</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartActionBaseAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActionBaseAction</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveMasterAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveMasterAction</a></li>
+<li><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></li>
 <li><a href="org/apache/hadoop/hbase/util/RestartMetaTest.html" title="class in org.apache.hadoop.hbase.util">RestartMetaTest</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartRandomDataNodeAction</a></li>
 <li><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartRandomRsAction</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/constant-values.html b/testdevapidocs/constant-values.html
index 92fdfe5..6491347 100644
--- a/testdevapidocs/constant-values.html
+++ b/testdevapidocs/constant-values.html
@@ -2220,6 +2220,20 @@
 <td class="colLast"><code>"hbase.chaosmonkey.action.killmastertimeout"</code></td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.Action.KILL_NAMENODE_TIMEOUT_DEFAULT">
+<!--   -->
+</a><code>protected&nbsp;static&nbsp;final&nbsp;long</code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_NAMENODE_TIMEOUT_DEFAULT">KILL_NAMENODE_TIMEOUT_DEFAULT</a></code></td>
+<td class="colLast"><code>60000L</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.Action.KILL_NAMENODE_TIMEOUT_KEY">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_NAMENODE_TIMEOUT_KEY">KILL_NAMENODE_TIMEOUT_KEY</a></code></td>
+<td class="colLast"><code>"hbase.chaosmonkey.action.killnamenodetimeout"</code></td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.Action.KILL_RS_TIMEOUT_DEFAULT">
 <!--   -->
 </a><code>protected&nbsp;static&nbsp;final&nbsp;long</code></td>
@@ -2276,6 +2290,20 @@
 <td class="colLast"><code>"hbase.chaosmonkey.action.startmastertimeout"</code></td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.Action.START_NAMENODE_TIMEOUT_DEFAULT">
+<!--   -->
+</a><code>protected&nbsp;static&nbsp;final&nbsp;long</code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#START_NAMENODE_TIMEOUT_DEFAULT">START_NAMENODE_TIMEOUT_DEFAULT</a></code></td>
+<td class="colLast"><code>60000L</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.Action.START_NAMENODE_TIMEOUT_KEY">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#START_NAMENODE_TIMEOUT_KEY">START_NAMENODE_TIMEOUT_KEY</a></code></td>
+<td class="colLast"><code>"hbase.chaosmonkey.action.startnamenodetimeout"</code></td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.Action.START_RS_TIMEOUT_DEFAULT">
 <!--   -->
 </a><code>protected&nbsp;static&nbsp;final&nbsp;long</code></td>
@@ -2327,6 +2355,39 @@
 </li>
 <li class="blockList">
 <table class="constantsSummary" border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
+<caption><span>org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th scope="col">Constant Field</th>
+<th class="colLast" scope="col">Value</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.RestartActiveNameNodeAction.ACTIVE_NN_LOCK_NAME">
+<!--   -->
+</a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#ACTIVE_NN_LOCK_NAME">ACTIVE_NN_LOCK_NAME</a></code></td>
+<td class="colLast"><code>"ActiveStandbyElectorLock"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.RestartActiveNameNodeAction.ZK_PARENT_ZNODE_DEFAULT">
+<!--   -->
+</a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#ZK_PARENT_ZNODE_DEFAULT">ZK_PARENT_ZNODE_DEFAULT</a></code></td>
+<td class="colLast"><code>"/hadoop-ha"</code></td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.chaos.actions.RestartActiveNameNodeAction.ZK_PARENT_ZNODE_KEY">
+<!--   -->
+</a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#ZK_PARENT_ZNODE_KEY">ZK_PARENT_ZNODE_KEY</a></code></td>
+<td class="colLast"><code>"ha.zookeeper.parent-znode"</code></td>
+</tr>
+</tbody>
+</table>
+</li>
+<li class="blockList">
+<table class="constantsSummary" border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
 <caption><span>org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/SplitAllRegionOfTableAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">SplitAllRegionOfTableAction</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
 <th class="colFirst" scope="col">Modifier and Type</th>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 8c8b1c3..41763ca 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -498,6 +498,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.html#activateFailure">activateFailure</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.wal.<a href="org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.html" title="class in org.apache.hadoop.hbase.regionserver.wal">InstrumentedLogWriter</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#ACTIVE_NN_LOCK_NAME">ACTIVE_NN_LOCK_NAME</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/TestActiveMasterManager.DummyMaster.html#activeMasterManager">activeMasterManager</a></span> - Variable in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/TestActiveMasterManager.DummyMaster.html" title="class in org.apache.hadoop.hbase.master">TestActiveMasterManager.DummyMaster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.html#ACTOR_PATTERN">ACTOR_PATTERN</a></span> - Static variable in class org.apache.hadoop.hbase.mapred.<a href="org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.html" title="class in org.apache.hadoop.hbase.mapred">TestTableMapReduceUtil</a></dt>
@@ -22257,6 +22259,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_MASTER_TIMEOUT_KEY">KILL_MASTER_TIMEOUT_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_NAMENODE_TIMEOUT_DEFAULT">KILL_NAMENODE_TIMEOUT_DEFAULT</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_NAMENODE_TIMEOUT_KEY">KILL_NAMENODE_TIMEOUT_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_RS_TIMEOUT_DEFAULT">KILL_RS_TIMEOUT_DEFAULT</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#KILL_RS_TIMEOUT_KEY">KILL_RS_TIMEOUT_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
@@ -22319,6 +22325,19 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/factories/UnbalanceMonkeyFactory.html#killMetaRs">killMetaRs</a></span> - Variable in class org.apache.hadoop.hbase.chaos.factories.<a href="org/apache/hadoop/hbase/chaos/factories/UnbalanceMonkeyFactory.html" title="class in org.apache.hadoop.hbase.chaos.factories">UnbalanceMonkeyFactory</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html" title="class in org.apache.hadoop.hbase">DistributedHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></dt>
+<dd>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#killNameNodeTimeout">killNameNodeTimeout</a></span> - Variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/TestReplicationKillMasterRS.html#killOneMasterRS--">killOneMasterRS()</a></span> - Method in class org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/TestReplicationKillMasterRS.html" title="class in org.apache.hadoop.hbase.replication">TestReplicationKillMasterRS</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/TestReplicationKillMasterRSWithSeparateOldWALs.html#killOneMasterRS--">killOneMasterRS()</a></span> - Method in class org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/TestReplicationKillMasterRSWithSeparateOldWALs.html" title="class in org.apache.hadoop.hbase.replication">TestReplicationKillMasterRSWithSeparateOldWALs</a></dt>
@@ -29031,6 +29050,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveMasterAction.html#perform--">perform()</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveMasterAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveMasterAction</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#perform--">perform()</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.html#perform--">perform()</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartRandomDataNodeAction</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.html#perform--">perform()</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartRandomRsAction</a></dt>
@@ -33169,6 +33190,12 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/factories/SlowDeterministicMonkeyFactory.html#restartActiveMasterSleepTime">restartActiveMasterSleepTime</a></span> - Variable in class org.apache.hadoop.hbase.chaos.factories.<a href="org/apache/hadoop/hbase/chaos/factories/SlowDeterministicMonkeyFactory.html" title="class in org.apache.hadoop.hbase.chaos.factories">SlowDeterministicMonkeyFactory</a></dt>
 <dd>&nbsp;</dd>
+<dt><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions"><span class="typeNameLink">RestartActiveNameNodeAction</span></a> - Class in <a href="org/apache/hadoop/hbase/chaos/actions/package-summary.html">org.apache.hadoop.hbase.chaos.actions</a></dt>
+<dd>
+<div class="block">Action that tries to restart the active namenode.</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#RestartActiveNameNodeAction-long-">RestartActiveNameNodeAction(long)</a></span> - Constructor for class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html#restartAndAssert-long-long-int-int-">restartAndAssert(long, long, int, int)</a></span> - Method in class org.apache.hadoop.hbase.procedure2.store.wal.<a href="org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html" title="class in org.apache.hadoop.hbase.procedure2.store.wal">TestWALProcedureStore</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActionBaseAction.html#restartDataNode-org.apache.hadoop.hbase.ServerName-long-">restartDataNode(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActionBaseAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActionBaseAction</a></dt>
@@ -33198,6 +33225,8 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RestartMetaTest.html#RestartMetaTest--">RestartMetaTest()</a></span> - Constructor for class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RestartMetaTest.html" title="class in org.apache.hadoop.hbase.util">RestartMetaTest</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActionBaseAction.html#restartNameNode-org.apache.hadoop.hbase.ServerName-long-">restartNameNode(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActionBaseAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActionBaseAction</a></dt>
+<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions"><span class="typeNameLink">RestartRandomDataNodeAction</span></a> - Class in <a href="org/apache/hadoop/hbase/chaos/actions/package-summary.html">org.apache.hadoop.hbase.chaos.actions</a></dt>
 <dd>
 <div class="block">Action that restarts a random datanode.</div>
@@ -39358,6 +39387,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#START_MASTER_TIMEOUT_KEY">START_MASTER_TIMEOUT_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#START_NAMENODE_TIMEOUT_DEFAULT">START_NAMENODE_TIMEOUT_DEFAULT</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#START_NAMENODE_TIMEOUT_KEY">START_NAMENODE_TIMEOUT_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.html#START_ROW">START_ROW</a></span> - Static variable in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.html" title="class in org.apache.hadoop.hbase.mapreduce">IntegrationTestTableSnapshotInputFormat</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rest/model/TestScannerModel.html#START_ROW">START_ROW</a></span> - Static variable in class org.apache.hadoop.hbase.rest.model.<a href="org/apache/hadoop/hbase/rest/model/TestScannerModel.html" title="class in org.apache.hadoop.hbase.rest.model">TestScannerModel</a></dt>
@@ -39603,6 +39636,19 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.html#startMonkey--">startMonkey()</a></span> - Method in class org.apache.hadoop.hbase.test.<a href="org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.html" title="class in org.apache.hadoop.hbase.test">IntegrationTestTimeBoundedRequestsWithRegionReplicas</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html" title="class in org.apache.hadoop.hbase">DistributedHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></dt>
+<dd>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/Action.html#startNameNodeTimeout">startNameNodeTimeout</a></span> - Variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/Action.html" title="class in org.apache.hadoop.hbase.chaos.actions">Action</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.ConcurrentPutRunnable.html#startNumber">startNumber</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.ConcurrentPutRunnable.html" title="class in org.apache.hadoop.hbase.regionserver">TestWalAndCompactingMemStoreFlush.ConcurrentPutRunnable</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/assignment/MockMasterServices.html#startProcedureExecutor-org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher-">startProcedureExecutor(RSProcedureDispatcher)</a></span> - Method in class org.apache.hadoop.hbase.master.assignment.<a href="org/apache/hadoop/hbase/master/assignment/MockMasterServices.html" title="class in org.apache.hadoop.hbase.master.assignment">MockMasterServices</a></dt>
@@ -39943,6 +39989,14 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/TestMetaWithReplicas.html#stopMasterAndValidateReplicaCount-int-int-">stopMasterAndValidateReplicaCount(int, int)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/TestMetaWithReplicas.html" title="class in org.apache.hadoop.hbase.client">TestMetaWithReplicas</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html" title="class in org.apache.hadoop.hbase">DistributedHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></dt>
+<dd>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/TestServerNonceManager.TestRunnable.html#stoppable">stoppable</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/TestServerNonceManager.TestRunnable.html" title="class in org.apache.hadoop.hbase.regionserver">TestServerNonceManager.TestRunnable</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.html#STOPPABLE">STOPPABLE</a></span> - Static variable in class org.apache.hadoop.hbase.replication.regionserver.<a href="org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.html" title="class in org.apache.hadoop.hbase.replication.regionserver">TestReplicationSink</a></dt>
@@ -55919,6 +55973,10 @@
 <dd>
 <div class="block">Tests a case where we replay only a flush start marker, then the region is closed.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/TestWALFactory.html#testOnlySetMetaWALProvider--">testOnlySetMetaWALProvider()</a></span> - Method in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/TestWALFactory.html" title="class in org.apache.hadoop.hbase.wal">TestWALFactory</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/TestWALFactory.html#testOnlySetWALProvider--">testOnlySetWALProvider()</a></span> - Method in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/TestWALFactory.html" title="class in org.apache.hadoop.hbase.wal">TestWALFactory</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.html#testOnRegionChange--">testOnRegionChange()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.html" title="class in org.apache.hadoop.hbase.client">TestHTableMultiplexerFlushCache</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.html#testOnRegionMove--">testOnRegionMove()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.html" title="class in org.apache.hadoop.hbase.client">TestHTableMultiplexerFlushCache</a></dt>
@@ -64160,6 +64218,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.html#TestWALProcedureStoreOnHDFS--">TestWALProcedureStoreOnHDFS()</a></span> - Constructor for class org.apache.hadoop.hbase.master.procedure.<a href="org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.html" title="class in org.apache.hadoop.hbase.master.procedure">TestWALProcedureStoreOnHDFS</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/TestWALFactory.html#testWALProviders--">testWALProviders()</a></span> - Method in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/TestWALFactory.html" title="class in org.apache.hadoop.hbase.wal">TestWALFactory</a></dt>
+<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.html" title="class in org.apache.hadoop.hbase.wal"><span class="typeNameLink">TestWALReaderOnSecureWAL</span></a> - Class in <a href="org/apache/hadoop/hbase/wal/package-summary.html">org.apache.hadoop.hbase.wal</a></dt>
 <dd>
 <div class="block">Test that verifies WAL written by SecureProtobufLogWriter is not readable by ProtobufLogReader</div>
@@ -68106,6 +68166,22 @@ the order they are declared.</div>
 <dd>
 <div class="block">Wait for the namenode.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html" title="class in org.apache.hadoop.hbase">DistributedHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></dt>
+<dd>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/DistributedHBaseCluster.html" title="class in org.apache.hadoop.hbase">DistributedHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></dt>
+<dd>
+<div class="block">Wait for the specified namenode to stop</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.html#waitForNumReplicas-int-">waitForNumReplicas(int)</a></span> - Method in class org.apache.hadoop.hbase.master.procedure.<a href="org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.html" title="class in org.apache.hadoop.hbase.master.procedure">TestWALProcedureStoreOnHDFS</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.SimpleSubprocedurePool.html#waitForOutstandingTasks--">waitForOutstandingTasks()</a></span> - Method in class org.apache.hadoop.hbase.procedure.<a href="org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.SimpleSubprocedurePool.html" title="class in org.apache.hadoop.hbase.procedure">SimpleRSProcedureManager.SimpleSubprocedurePool</a></dt>
@@ -69079,6 +69155,10 @@ the order they are declared.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/TestReplicationStateBasic.html#ZK_MAX_COUNT">ZK_MAX_COUNT</a></span> - Static variable in class org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/TestReplicationStateBasic.html" title="class in org.apache.hadoop.hbase.replication">TestReplicationStateBasic</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#ZK_PARENT_ZNODE_DEFAULT">ZK_PARENT_ZNODE_DEFAULT</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html#ZK_PARENT_ZNODE_KEY">ZK_PARENT_ZNODE_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.chaos.actions.<a href="org/apache/hadoop/hbase/chaos/actions/RestartActiveNameNodeAction.html" title="class in org.apache.hadoop.hbase.chaos.actions">RestartActiveNameNodeAction</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#ZK_SESSION_TIMEOUT">ZK_SESSION_TIMEOUT</a></span> - Static variable in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html" title="class in org.apache.hadoop.hbase.client">TestSeparateClientZKCluster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/TestReplicationStateBasic.html#ZK_SLEEP_INTERVAL">ZK_SLEEP_INTERVAL</a></span> - Static variable in class org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/TestReplicationStateBasic.html" title="class in org.apache.hadoop.hbase.replication">TestReplicationStateBasic</a></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html b/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html
index 413efaa..c531288 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.434">DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator</a>
+<pre>private static class <a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.465">DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;org.apache.hadoop.hbase.ServerName&gt;</pre>
 </li>
@@ -197,7 +197,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparat
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ServerNameIgnoreStartCodeComparator</h4>
-<pre>private&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html#line.434">ServerNameIgnoreStartCodeComparator</a>()</pre>
+<pre>private&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html#line.465">ServerNameIgnoreStartCodeComparator</a>()</pre>
 </li>
 </ul>
 </li>
@@ -214,7 +214,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparat
 <ul class="blockListLast">
 <li class="blockList">
 <h4>compare</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html#line.436">compare</a>(org.apache.hadoop.hbase.ServerName&nbsp;o1,
+<pre>public&nbsp;int&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.ServerNameIgnoreStartCodeComparator.html#line.467">compare</a>(org.apache.hadoop.hbase.ServerName&nbsp;o1,
                    org.apache.hadoop.hbase.ServerName&nbsp;o2)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.html b/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.html
index 131fe46..bf5606f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/DistributedHBaseCluster.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -293,57 +293,64 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 </tr>
 <tr id="i12" class="altColor">
 <td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+</td>
+</tr>
+<tr id="i13" class="rowColor">
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#killRegionServer-org.apache.hadoop.hbase.ServerName-">killRegionServer</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
  this causes the region server to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#killZkNode-org.apache.hadoop.hbase.ServerName-">killZkNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>protected boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#restoreAdmin--">restoreAdmin</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#restoreClusterMetrics-org.apache.hadoop.hbase.ClusterMetrics-">restoreClusterMetrics</a></span>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial)</code>
 <div class="block">Restores the cluster to given state if this is a real cluster,
  otherwise does nothing.</div>
 </td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>protected boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#restoreMasters-org.apache.hadoop.hbase.ClusterMetrics-org.apache.hadoop.hbase.ClusterMetrics-">restoreMasters</a></span>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial,
               org.apache.hadoop.hbase.ClusterMetrics&nbsp;current)</code>&nbsp;</td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code>protected boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#restoreRegionServers-org.apache.hadoop.hbase.ClusterMetrics-org.apache.hadoop.hbase.ClusterMetrics-">restoreRegionServers</a></span>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial,
                     org.apache.hadoop.hbase.ClusterMetrics&nbsp;current)</code>&nbsp;</td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i19" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#setClusterManager-org.apache.hadoop.hbase.ClusterManager-">setClusterManager</a></span>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.html" title="interface in org.apache.hadoop.hbase">ClusterManager</a>&nbsp;clusterManager)</code>&nbsp;</td>
 </tr>
-<tr id="i19" class="rowColor">
+<tr id="i20" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#shutdown--">shutdown</a></span>()</code>
 <div class="block">Shut down the HBase cluster</div>
 </td>
 </tr>
-<tr id="i20" class="altColor">
+<tr id="i21" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#startDataNode-org.apache.hadoop.hbase.ServerName-">startDataNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i21" class="rowColor">
+<tr id="i22" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#startMaster-java.lang.String-int-">startMaster</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -351,7 +358,14 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
  starts a master locally.</div>
 </td>
 </tr>
-<tr id="i22" class="altColor">
+<tr id="i23" class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+</td>
+</tr>
+<tr id="i24" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#startRegionServer-java.lang.String-int-">startRegionServer</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                  int&nbsp;port)</code>
@@ -359,7 +373,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
  starts a region server locally.</div>
 </td>
 </tr>
-<tr id="i23" class="rowColor">
+<tr id="i25" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#startZkNode-java.lang.String-int-">startZkNode</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -367,94 +381,114 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i24" class="altColor">
+<tr id="i26" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#stopDataNode-org.apache.hadoop.hbase.ServerName-">stopDataNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i25" class="rowColor">
+<tr id="i27" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#stopMaster-org.apache.hadoop.hbase.ServerName-">stopMaster</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i26" class="altColor">
+<tr id="i28" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+</td>
+</tr>
+<tr id="i29" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#stopRegionServer-org.apache.hadoop.hbase.ServerName-">stopRegionServer</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i27" class="rowColor">
+<tr id="i30" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#stopZkNode-org.apache.hadoop.hbase.ServerName-">stopZkNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i28" class="altColor">
+<tr id="i31" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForActiveAndReadyMaster-long-">waitForActiveAndReadyMaster</a></span>(long&nbsp;timeout)</code>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
 </td>
 </tr>
-<tr id="i29" class="rowColor">
+<tr id="i32" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForDataNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                       long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to join the cluster</div>
 </td>
 </tr>
-<tr id="i30" class="altColor">
+<tr id="i33" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForDataNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                      long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i31" class="rowColor">
+<tr id="i34" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">waitForMasterToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified master to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i32" class="altColor">
+<tr id="i35" class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                      long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+</td>
+</tr>
+<tr id="i36" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                     long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to stop</div>
+</td>
+</tr>
+<tr id="i37" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-">waitForRegionServerToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                          long&nbsp;timeout)</code>
 <div class="block">Wait for the specified region server to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i33" class="rowColor">
+<tr id="i38" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForServiceToStart-org.apache.hadoop.hbase.ClusterManager.ServiceType-org.apache.hadoop.hbase.ServerName-long-">waitForServiceToStart</a></span>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                      org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                      long&nbsp;timeout)</code>&nbsp;</td>
 </tr>
-<tr id="i34" class="altColor">
+<tr id="i39" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForServiceToStop-org.apache.hadoop.hbase.ClusterManager.ServiceType-org.apache.hadoop.hbase.ServerName-long-">waitForServiceToStop</a></span>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                     org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                     long&nbsp;timeout)</code>&nbsp;</td>
 </tr>
-<tr id="i35" class="rowColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                     long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to join the cluster</div>
 </td>
 </tr>
-<tr id="i36" class="altColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitForZkNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i37" class="rowColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/DistributedHBaseCluster.html#waitUntilShutDown--">waitUntilShutDown</a></span>()</code>
 <div class="block">Wait for HBase Cluster to shut down.</div>
@@ -966,13 +1000,107 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 </dl>
 </li>
 </ul>
+<a name="startNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>startNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.208">startNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="killNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>killNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.215">killNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="stopNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>stopNameNode</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.222">stopNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStart</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.229">waitForNameNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                                   long&nbsp;timeout)
+                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStop</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.234">waitForNameNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                                  long&nbsp;timeout)
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
+<div class="block">Wait for the specified namenode to stop</div>
+<dl>
+<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
+<dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
 <a name="waitForServiceToStop-org.apache.hadoop.hbase.ClusterManager.ServiceType-org.apache.hadoop.hbase.ServerName-long-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForServiceToStop</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.207">waitForServiceToStop</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.238">waitForServiceToStop</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                                   org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                   long&nbsp;timeout)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -988,7 +1116,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForServiceToStart</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.221">waitForServiceToStart</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>private&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.252">waitForServiceToStart</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                                    org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                    long&nbsp;timeout)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1004,7 +1132,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterAdminService</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.237">getMasterAdminService</a>()
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.268">getMasterAdminService</a>()
                                                                                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#getMasterAdminService--">HBaseCluster</a></code></span></div>
 <div class="block">Returns an <code>MasterService.BlockingInterface</code> to the active master</div>
@@ -1022,7 +1150,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>startMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.243">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.274">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                         int&nbsp;port)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startMaster-java.lang.String-int-">HBaseCluster</a></code></span></div>
@@ -1044,7 +1172,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>killMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.249">killMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.280">killMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killMaster-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Kills the master process if this is a distributed cluster, otherwise,
@@ -1063,7 +1191,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.255">stopMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.286">stopMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopMaster-org.apache.hadoop.hbase.ServerName-">HBaseCluster</a></code></span></div>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
@@ -1081,7 +1209,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForMasterToStop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.261">waitForMasterToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.292">waitForMasterToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                 long&nbsp;timeout)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">HBaseCluster</a></code></span></div>
@@ -1100,7 +1228,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForActiveAndReadyMaster</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.266">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.297">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForActiveAndReadyMaster-long-">HBaseCluster</a></code></span></div>
 <div class="block">Blocks until there is an active master and that master has completed
@@ -1124,7 +1252,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerHoldingRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.283">getServerHoldingRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tn,
+<pre>public&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.314">getServerHoldingRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tn,
                                                                  byte[]&nbsp;regionName)
                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#getServerHoldingRegion-org.apache.hadoop.hbase.TableName-byte:A-">HBaseCluster</a></code></span></div>
@@ -1148,7 +1276,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>waitUntilShutDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.301">waitUntilShutDown</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.332">waitUntilShutDown</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitUntilShutDown--">HBaseCluster</a></code></span></div>
 <div class="block">Wait for HBase Cluster to shut down.</div>
 <dl>
@@ -1163,7 +1291,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.307">shutdown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.338">shutdown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#shutdown--">HBaseCluster</a></code></span></div>
 <div class="block">Shut down the HBase cluster</div>
@@ -1181,7 +1309,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>isDistributedCluster</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.313">isDistributedCluster</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.344">isDistributedCluster</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#isDistributedCluster--">isDistributedCluster</a></code>&nbsp;in class&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="class in org.apache.hadoop.hbase">HBaseCluster</a></code></dd>
@@ -1197,7 +1325,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreClusterMetrics</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.318">restoreClusterMetrics</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.349">restoreClusterMetrics</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#restoreClusterMetrics-org.apache.hadoop.hbase.ClusterMetrics-">HBaseCluster</a></code></span></div>
 <div class="block">Restores the cluster to given state if this is a real cluster,
@@ -1220,7 +1348,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreMasters</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.333">restoreMasters</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.364">restoreMasters</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial,
                                  org.apache.hadoop.hbase.ClusterMetrics&nbsp;current)</pre>
 </li>
 </ul>
@@ -1230,7 +1358,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreRegionServers</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.445">restoreRegionServers</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.476">restoreRegionServers</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;initial,
                                        org.apache.hadoop.hbase.ClusterMetrics&nbsp;current)</pre>
 </li>
 </ul>
@@ -1240,7 +1368,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html" title="c
 <ul class="blockListLast">
 <li class="blockList">
 <h4>restoreAdmin</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.500">restoreAdmin</a>()
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/DistributedHBaseCluster.html#line.531">restoreAdmin</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>


[36/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 2d9fc38..a607492 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -348,11 +348,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
index c08b2dc..0f18bc4 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
@@ -763,7 +763,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/Abst
 <ul class="blockList">
 <li class="blockList">
 <h4>setTableStateToDisabled</h4>
-<pre>protected static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.302">setTableStateToDisabled</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
+<pre>protected static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.303">setTableStateToDisabled</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
                                               <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Mark table state to Disabled</div>
@@ -781,7 +781,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/Abst
 <ul class="blockList">
 <li class="blockList">
 <h4>postDisable</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.319">postDisable</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.320">postDisable</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
                            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState&nbsp;state)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -802,7 +802,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/Abst
 <ul class="blockList">
 <li class="blockList">
 <h4>isTraceEnabled</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.329">isTraceEnabled</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.330">isTraceEnabled</a>()</pre>
 <div class="block">The procedure could be restarted from a different machine. If the variable is null, we need to
  retrieve it.</div>
 <dl>
@@ -817,7 +817,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/Abst
 <ul class="blockListLast">
 <li class="blockList">
 <h4>runCoprocessorAction</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.343">runCoprocessorAction</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html#line.344">runCoprocessorAction</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState&nbsp;state)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 71e02ff..ddea7b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -216,10 +216,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
index 7b435d2..4b549da 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
@@ -127,8 +127,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/TaskMonitor.TaskFilter.TaskType.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">TaskMonitor.TaskFilter.TaskType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html b/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
index 1c6b792..7a1bbc8 100644
--- a/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
+++ b/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
@@ -430,11 +430,21 @@
 </td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HFileBlock.BlockDeserializer.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">deserialize</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="type parameter in CacheableDeserializer">T</a></code></td>
 <td class="colLast"><span class="typeNameLabel">CacheableDeserializer.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b,
            boolean&nbsp;reuse,
            <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)</code>&nbsp;</td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HFileBlock.BlockDeserializer.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
+           boolean&nbsp;reuse,
+           <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)</code>&nbsp;</td>
+</tr>
 <tr class="rowColor">
 <td class="colFirst"><code>protected byte[]</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileBlockIndex.BlockIndexReader.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html#getNonRootIndexedKey-org.apache.hadoop.hbase.nio.ByteBuff-int-">getNonRootIndexedKey</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;nonRootIndex,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index af7f658..ed18c4b 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -438,18 +438,18 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index fc48127..4e335d1 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 6f95fa3..a0fa2a2 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 308a8c8..fe426ae 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -704,20 +704,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 23060c2..2731576 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index feee307..46651a5 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -247,9 +247,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 4ade4c1..03e1383 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -199,9 +199,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftServerRunner.ImplType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index f5c7b55..04cba7d 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -515,14 +515,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html b/devapidocs/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
index 08bb0a1..c577172 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -315,6 +315,10 @@ implements <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html"
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#getWALStream--">getWALStream</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i11" class="rowColor">
+<td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#getWrappedProvider--">getWrappedProvider</a></span>()</code>&nbsp;</td>
+</tr>
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#init-org.apache.hadoop.hbase.wal.WALFactory-org.apache.hadoop.conf.Configuration-java.lang.String-">init</a></span>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
     org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -322,22 +326,22 @@ implements <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html"
 <div class="block">Set up the provider to create wals.</div>
 </td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#peerSyncReplicationStateChange-java.lang.String-org.apache.hadoop.hbase.replication.SyncReplicationState-org.apache.hadoop.hbase.replication.SyncReplicationState-int-">peerSyncReplicationStateChange</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;peerId,
                               <a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication">SyncReplicationState</a>&nbsp;from,
                               <a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication">SyncReplicationState</a>&nbsp;to,
                               int&nbsp;stage)</code>&nbsp;</td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#safeClose-org.apache.hadoop.hbase.wal.WAL-">safeClose</a></span>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&nbsp;wal)</code>&nbsp;</td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#setPeerInfoProvider-org.apache.hadoop.hbase.replication.regionserver.SyncReplicationPeerInfoProvider-">setPeerInfoProvider</a></span>(<a href="../../../../../org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProvider.html" title="interface in org.apache.hadoop.hbase.replication.regionserver">SyncReplicationPeerInfoProvider</a>&nbsp;peerInfoProvider)</code>&nbsp;</td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#shutdown--">shutdown</a></span>()</code>
 <div class="block">persist outstanding WALs to storage and stop accepting new appends.</div>
@@ -751,7 +755,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html"
 <a name="getSyncReplicationPeerIdFromWALName-java.lang.String-">
 <!--   -->
 </a>
-<ul class="blockListLast">
+<ul class="blockList">
 <li class="blockList">
 <h4>getSyncReplicationPeerIdFromWALName</h4>
 <pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#line.333">getSyncReplicationPeerIdFromWALName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
@@ -763,6 +767,15 @@ implements <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html"
  </p></div>
 </li>
 </ul>
+<a name="getWrappedProvider--">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>getWrappedProvider</h4>
+<pre><a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#line.348">getWrappedProvider</a>()</pre>
+</li>
+</ul>
 </li>
 </ul>
 </li>


[14/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlock;<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        }<a name="line.523"></a>
+<span class="sourceLi

<TRUNCATED>

[41/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
index dccbeab..7b93965 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":9,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -297,134 +297,130 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#DEFAULT_WRITER_THREADS">DEFAULT_WRITER_THREADS</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#deserialiserMap">deserialiserMap</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#EXTRA_FREE_FACTOR_CONFIG_NAME">EXTRA_FREE_FACTOR_CONFIG_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#extraFreeFactor">extraFreeFactor</a></span></code>
 <div class="block">Free this floating point factor of extra blocks when evicting.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#freeInProgress">freeInProgress</a></span></code>
 <div class="block">Volatile boolean to track if free space is in process or not</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Lock.html?is-external=true" title="class or interface in java.util.concurrent.locks">Lock</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#freeSpaceLock">freeSpaceLock</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#heapSize">heapSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ioEngine">ioEngine</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ioErrorStartTime">ioErrorStartTime</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ioErrorsTolerationDuration">ioErrorsTolerationDuration</a></span></code>
 <div class="block">Duration of IO errors tolerated before we disable cache, 1 min as default</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#MEMORY_FACTOR_CONFIG_NAME">MEMORY_FACTOR_CONFIG_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#memoryFactor">memoryFactor</a></span></code>
 <div class="block">In-memory bucket size</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#MIN_FACTOR_CONFIG_NAME">MIN_FACTOR_CONFIG_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#minFactor">minFactor</a></span></code>
 <div class="block">Minimum threshold of cache (when evicting, evict until size < min)</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#MULTI_FACTOR_CONFIG_NAME">MULTI_FACTOR_CONFIG_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#multiFactor">multiFactor</a></span></code>
 <div class="block">Multiple access bucket size</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#offsetLock">offsetLock</a></span></code>
 <div class="block">A ReentrantReadWriteLock to lock on a particular block identified by offset.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#persistencePath">persistencePath</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache">ramCache</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#realCacheSize">realCacheSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ScheduledExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ScheduledExecutorService</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#scheduleThreadPool">scheduleThreadPool</a></span></code>
 <div class="block">Statistics thread schedule pool (for heavy debugging, could remove)</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#SINGLE_FACTOR_CONFIG_NAME">SINGLE_FACTOR_CONFIG_NAME</a></span></code>
 <div class="block">Priority buckets config</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#singleFactor">singleFactor</a></span></code>
 <div class="block">Single access bucket size</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#statThreadPeriod">statThreadPeriod</a></span></code>
 <div class="block">Statistics thread</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#wait_when_cache">wait_when_cache</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#writerQueues">writerQueues</a></span></code>
 <div class="block">A list of writer queues.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.WriterThread</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#writerThreads">writerThreads</a></span></code>&nbsp;</td>
 </tr>
@@ -535,33 +531,39 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#checkRamCache-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">checkRamCache</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</code>&nbsp;</td>
 </tr>
 <tr id="i9" class="rowColor">
+<td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FileInputStream.html?is-external=true" title="class or interface in java.io">FileInputStream</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#deleteFileOnClose-java.io.File-">deleteFileOnClose</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a>&nbsp;file)</code>
+<div class="block">Create an input stream that deletes the file after reading it.</div>
+</td>
+</tr>
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#disableCache--">disableCache</a></span>()</code>
 <div class="block">Used to shut down the cache -or- turn it off in the case of something broken.</div>
 </td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#evictBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">evictBlock</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</code>
 <div class="block">Evict block from cache.</div>
 </td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#evictBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-boolean-">evictBlock</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
           boolean&nbsp;deletedBlock)</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#evictBlocksByHfileName-java.lang.String-">evictBlocksByHfileName</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hfileName)</code>
 <div class="block">Evicts all blocks for a specific HFile.</div>
 </td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#forceEvict-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">forceEvict</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</code>&nbsp;</td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#freeEntireBuckets-int-">freeEntireBuckets</a></span>(int&nbsp;completelyFreeBucketsNeeded)</code>
 <div class="block">This method will find the buckets that are minimally occupied
@@ -571,22 +573,22 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
  passed, sometimes it might not due to changing refCounts</div>
 </td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#freeSpace-java.lang.String-">freeSpace</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</code>
 <div class="block">Free the space if the used size reaches acceptableSize() or one size block
  couldn't be allocated.</div>
 </td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>(package private) float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getAcceptableFactor--">getAcceptableFactor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getAllocator--">getAllocator</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i19" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-boolean-boolean-boolean-">getBlock</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
         boolean&nbsp;caching,
@@ -595,49 +597,49 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <div class="block">Get the buffer of the block with the specified key.</div>
 </td>
 </tr>
-<tr id="i19" class="rowColor">
+<tr id="i20" class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getBlockCaches--">getBlockCaches</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i20" class="altColor">
+<tr id="i21" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getBlockCount--">getBlockCount</a></span>()</code>
 <div class="block">Returns the number of blocks currently cached in the block cache.</div>
 </td>
 </tr>
-<tr id="i21" class="rowColor">
+<tr id="i22" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getCurrentDataSize--">getCurrentDataSize</a></span>()</code>
 <div class="block">Returns the occupied size of data blocks, in bytes.</div>
 </td>
 </tr>
-<tr id="i22" class="altColor">
+<tr id="i23" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getCurrentSize--">getCurrentSize</a></span>()</code>
 <div class="block">Returns the occupied size of the block cache, in bytes.</div>
 </td>
 </tr>
-<tr id="i23" class="rowColor">
+<tr id="i24" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getDataBlockCount--">getDataBlockCount</a></span>()</code>
 <div class="block">Returns the number of data blocks currently cached in the block cache.</div>
 </td>
 </tr>
-<tr id="i24" class="altColor">
+<tr id="i25" class="rowColor">
 <td class="colFirst"><code>(package private) float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getExtraFreeFactor--">getExtraFreeFactor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i25" class="rowColor">
+<tr id="i26" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getFreeSize--">getFreeSize</a></span>()</code>
 <div class="block">Returns the free size of the block cache, in bytes.</div>
 </td>
 </tr>
-<tr id="i26" class="altColor">
+<tr id="i27" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getIoEngine--">getIoEngine</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i27" class="rowColor">
+<tr id="i28" class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getIOEngineFromName-java.lang.String-long-java.lang.String-">getIOEngineFromName</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
                    long&nbsp;capacity,
@@ -645,29 +647,29 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <div class="block">Get the IOEngine from the IO engine name</div>
 </td>
 </tr>
-<tr id="i28" class="altColor">
+<tr id="i29" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getMaxSize--">getMaxSize</a></span>()</code>
 <div class="block">Returns the Max size of the block cache, in bytes.</div>
 </td>
 </tr>
-<tr id="i29" class="rowColor">
+<tr id="i30" class="altColor">
 <td class="colFirst"><code>(package private) float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getMemoryFactor--">getMemoryFactor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i30" class="altColor">
+<tr id="i31" class="rowColor">
 <td class="colFirst"><code>(package private) float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getMinFactor--">getMinFactor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i31" class="rowColor">
+<tr id="i32" class="altColor">
 <td class="colFirst"><code>(package private) float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getMultiFactor--">getMultiFactor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i32" class="altColor">
+<tr id="i33" class="rowColor">
 <td class="colFirst"><code>(package private) long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getPartitionSize-float-">getPartitionSize</a></span>(float&nbsp;partitionFactor)</code>&nbsp;</td>
 </tr>
-<tr id="i33" class="rowColor">
+<tr id="i34" class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getRAMQueueEntries-java.util.concurrent.BlockingQueue-java.util.List-">getRAMQueueEntries</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;q,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;receptacle)</code>
@@ -675,53 +677,57 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
  before returning.</div>
 </td>
 </tr>
-<tr id="i34" class="altColor">
+<tr id="i35" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getRealCacheSize--">getRealCacheSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i35" class="rowColor">
+<tr id="i36" class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getRefCount-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">getRefCount</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</code>&nbsp;</td>
 </tr>
-<tr id="i36" class="altColor">
+<tr id="i37" class="rowColor">
 <td class="colFirst"><code>(package private) float</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getSingleFactor--">getSingleFactor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i37" class="rowColor">
+<tr id="i38" class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#getStats--">getStats</a></span>()</code>
 <div class="block">Get the statistics for this block cache.</div>
 </td>
 </tr>
-<tr id="i38" class="altColor">
+<tr id="i39" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#heapSize--">heapSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i39" class="rowColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#isCacheEnabled--">isCacheEnabled</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i40" class="altColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CachedBlock.html" title="interface in org.apache.hadoop.hbase.io.hfile">CachedBlock</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#iterator--">iterator</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i41" class="rowColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#join--">join</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i42" class="altColor">
+<tr id="i43" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#logStats--">logStats</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i43" class="rowColor">
+<tr id="i44" class="altColor">
+<td class="colFirst"><code>private void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#parsePB-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry-">parsePB</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry&nbsp;proto)</code>&nbsp;</td>
+</tr>
+<tr id="i45" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#persistToFile--">persistToFile</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i44" class="altColor">
+<tr id="i46" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#retrieveFromFile-int:A-">retrieveFromFile</a></span>(int[]&nbsp;bucketSizes)</code>&nbsp;</td>
 </tr>
-<tr id="i45" class="rowColor">
+<tr id="i47" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#returnBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-org.apache.hadoop.hbase.io.hfile.Cacheable-">returnBlock</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
            <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;block)</code>
@@ -729,34 +735,40 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
  is over.</div>
 </td>
 </tr>
-<tr id="i46" class="altColor">
+<tr id="i48" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#sanityCheckConfigs--">sanityCheckConfigs</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i47" class="rowColor">
+<tr id="i49" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#shutdown--">shutdown</a></span>()</code>
 <div class="block">Shutdown the cache.</div>
 </td>
 </tr>
-<tr id="i48" class="altColor">
+<tr id="i50" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#size--">size</a></span>()</code>
 <div class="block">Returns the total size of the block cache, in bytes.</div>
 </td>
 </tr>
-<tr id="i49" class="rowColor">
+<tr id="i51" class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#startWriterThreads--">startWriterThreads</a></span>()</code>
 <div class="block">Called by the constructor to start the writer threads.</div>
 </td>
 </tr>
-<tr id="i50" class="altColor">
+<tr id="i52" class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#stopWriterThreads--">stopWriterThreads</a></span>()</code>
 <div class="block">Only used in test</div>
 </td>
 </tr>
+<tr id="i53" class="rowColor">
+<td class="colFirst"><code>private void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#verifyCapacityAndClasses-long-java.lang.String-java.lang.String-">verifyCapacityAndClasses</a></span>(long&nbsp;capacitySize,
+                        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioclass,
+                        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;mapclass)</code>&nbsp;</td>
+</tr>
 </table>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
@@ -1087,22 +1099,13 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Lock.html?is-external=true" title="class or interface in java.util.concurrent.locks">Lock</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.165">freeSpaceLock</a></pre>
 </li>
 </ul>
-<a name="deserialiserMap">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>deserialiserMap</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.167">deserialiserMap</a></pre>
-</li>
-</ul>
 <a name="realCacheSize">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>realCacheSize</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.169">realCacheSize</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.167">realCacheSize</a></pre>
 </li>
 </ul>
 <a name="heapSize">
@@ -1111,7 +1114,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.170">heapSize</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.168">heapSize</a></pre>
 </li>
 </ul>
 <a name="blockNumber">
@@ -1120,7 +1123,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>blockNumber</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.172">blockNumber</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.170">blockNumber</a></pre>
 <div class="block">Current number of cached elements</div>
 </li>
 </ul>
@@ -1130,7 +1133,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>accessCount</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.175">accessCount</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.173">accessCount</a></pre>
 <div class="block">Cache access count (sequential ID)</div>
 </li>
 </ul>
@@ -1140,7 +1143,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_WAIT_TIME</h4>
-<pre>private static final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.177">DEFAULT_CACHE_WAIT_TIME</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.175">DEFAULT_CACHE_WAIT_TIME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_CACHE_WAIT_TIME">Constant Field Values</a></dd>
@@ -1153,7 +1156,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>wait_when_cache</h4>
-<pre>boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.181">wait_when_cache</a></pre>
+<pre>boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.179">wait_when_cache</a></pre>
 </li>
 </ul>
 <a name="cacheStats">
@@ -1162,7 +1165,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheStats</h4>
-<pre>private final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCacheStats</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.183">cacheStats</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCacheStats</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.181">cacheStats</a></pre>
 </li>
 </ul>
 <a name="persistencePath">
@@ -1171,7 +1174,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>persistencePath</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.185">persistencePath</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.183">persistencePath</a></pre>
 </li>
 </ul>
 <a name="cacheCapacity">
@@ -1180,7 +1183,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheCapacity</h4>
-<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.186">cacheCapacity</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.184">cacheCapacity</a></pre>
 </li>
 </ul>
 <a name="blockSize">
@@ -1189,7 +1192,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>blockSize</h4>
-<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.188">blockSize</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.186">blockSize</a></pre>
 <div class="block">Approximate block size</div>
 </li>
 </ul>
@@ -1199,7 +1202,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>ioErrorsTolerationDuration</h4>
-<pre>private final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.191">ioErrorsTolerationDuration</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.189">ioErrorsTolerationDuration</a></pre>
 <div class="block">Duration of IO errors tolerated before we disable cache, 1 min as default</div>
 </li>
 </ul>
@@ -1209,7 +1212,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ERROR_TOLERATION_DURATION</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.193">DEFAULT_ERROR_TOLERATION_DURATION</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.191">DEFAULT_ERROR_TOLERATION_DURATION</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION">Constant Field Values</a></dd>
@@ -1222,7 +1225,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>ioErrorStartTime</h4>
-<pre>private volatile&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.197">ioErrorStartTime</a></pre>
+<pre>private volatile&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.195">ioErrorStartTime</a></pre>
 </li>
 </ul>
 <a name="offsetLock">
@@ -1231,7 +1234,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>offsetLock</h4>
-<pre>final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.206">offsetLock</a></pre>
+<pre>final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.204">offsetLock</a></pre>
 <div class="block">A ReentrantReadWriteLock to lock on a particular block identified by offset.
  The purpose of this is to avoid freeing the block which is being read.
  <p>
@@ -1244,7 +1247,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>blocksByHFile</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.208">blocksByHFile</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.206">blocksByHFile</a></pre>
 </li>
 </ul>
 <a name="scheduleThreadPool">
@@ -1253,7 +1256,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>scheduleThreadPool</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ScheduledExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ScheduledExecutorService</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.227">scheduleThreadPool</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ScheduledExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ScheduledExecutorService</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.225">scheduleThreadPool</a></pre>
 <div class="block">Statistics thread schedule pool (for heavy debugging, could remove)</div>
 </li>
 </ul>
@@ -1263,7 +1266,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>bucketAllocator</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.231">bucketAllocator</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.229">bucketAllocator</a></pre>
 </li>
 </ul>
 <a name="acceptableFactor">
@@ -1272,7 +1275,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>acceptableFactor</h4>
-<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.234">acceptableFactor</a></pre>
+<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.232">acceptableFactor</a></pre>
 <div class="block">Acceptable size of cache (no evictions if size < acceptable)</div>
 </li>
 </ul>
@@ -1282,7 +1285,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>minFactor</h4>
-<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.237">minFactor</a></pre>
+<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.235">minFactor</a></pre>
 <div class="block">Minimum threshold of cache (when evicting, evict until size < min)</div>
 </li>
 </ul>
@@ -1292,7 +1295,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>extraFreeFactor</h4>
-<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.240">extraFreeFactor</a></pre>
+<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.238">extraFreeFactor</a></pre>
 <div class="block">Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor)</div>
 </li>
 </ul>
@@ -1302,7 +1305,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>singleFactor</h4>
-<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.243">singleFactor</a></pre>
+<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.241">singleFactor</a></pre>
 <div class="block">Single access bucket size</div>
 </li>
 </ul>
@@ -1312,7 +1315,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>multiFactor</h4>
-<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.246">multiFactor</a></pre>
+<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.244">multiFactor</a></pre>
 <div class="block">Multiple access bucket size</div>
 </li>
 </ul>
@@ -1322,7 +1325,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockListLast">
 <li class="blockList">
 <h4>memoryFactor</h4>
-<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.249">memoryFactor</a></pre>
+<pre>private&nbsp;float <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.247">memoryFactor</a></pre>
 <div class="block">In-memory bucket size</div>
 </li>
 </ul>
@@ -1340,7 +1343,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>BucketCache</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.251">BucketCache</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.249">BucketCache</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
                    long&nbsp;capacity,
                    int&nbsp;blockSize,
                    int[]&nbsp;bucketSizes,
@@ -1362,7 +1365,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketCache</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.258">BucketCache</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.256">BucketCache</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
                    long&nbsp;capacity,
                    int&nbsp;blockSize,
                    int[]&nbsp;bucketSizes,
@@ -1394,7 +1397,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckConfigs</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.329">sanityCheckConfigs</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.324">sanityCheckConfigs</a>()</pre>
 </li>
 </ul>
 <a name="startWriterThreads--">
@@ -1403,7 +1406,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>startWriterThreads</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.346">startWriterThreads</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.341">startWriterThreads</a>()</pre>
 <div class="block">Called by the constructor to start the writer threads. Used by tests that need to override
  starting the threads.</div>
 </li>
@@ -1414,7 +1417,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>isCacheEnabled</h4>
-<pre>boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.353">isCacheEnabled</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.348">isCacheEnabled</a>()</pre>
 </li>
 </ul>
 <a name="getMaxSize--">
@@ -1423,7 +1426,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaxSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.358">getMaxSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.353">getMaxSize</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getMaxSize--">BlockCache</a></code></span></div>
 <div class="block">Returns the Max size of the block cache, in bytes.</div>
 <dl>
@@ -1440,7 +1443,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getIoEngine</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.362">getIoEngine</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.357">getIoEngine</a>()</pre>
 </li>
 </ul>
 <a name="getIOEngineFromName-java.lang.String-long-java.lang.String-">
@@ -1449,7 +1452,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getIOEngineFromName</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.374">getIOEngineFromName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.369">getIOEngineFromName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioEngineName,
                                      long&nbsp;capacity,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;persistencePath)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1472,7 +1475,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheBlock</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.399">cacheBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.394">cacheBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                        <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;buf)</pre>
 <div class="block">Cache the block with the specified name and buffer.</div>
 <dl>
@@ -1490,7 +1493,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheBlock</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.410">cacheBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.405">cacheBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                        <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;cachedItem,
                        boolean&nbsp;inMemory)</pre>
 <div class="block">Cache the block with the specified name and buffer.</div>
@@ -1510,7 +1513,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheBlockWithWait</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.421">cacheBlockWithWait</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.416">cacheBlockWithWait</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                                 <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;cachedItem,
                                 boolean&nbsp;inMemory,
                                 boolean&nbsp;wait)</pre>
@@ -1530,7 +1533,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheBlockWithWaitInternal</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.434">cacheBlockWithWaitInternal</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.429">cacheBlockWithWaitInternal</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                                         <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;cachedItem,
                                         boolean&nbsp;inMemory,
                                         boolean&nbsp;wait)</pre>
@@ -1542,7 +1545,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlock</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.484">getBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.479">getBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
                           boolean&nbsp;caching,
                           boolean&nbsp;repeat,
                           boolean&nbsp;updateCacheMetrics)</pre>
@@ -1566,7 +1569,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>blockEvicted</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.543">blockEvicted</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.538">blockEvicted</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                   <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;bucketEntry,
                   boolean&nbsp;decrementBlockNumber)</pre>
 </li>
@@ -1577,7 +1580,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>evictBlock</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.553">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.548">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#evictBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">BlockCache</a></code></span></div>
 <div class="block">Evict block from cache.</div>
 <dl>
@@ -1596,7 +1599,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>forceEvict</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.559">forceEvict</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.554">forceEvict</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 </li>
 </ul>
 <a name="checkRamCache-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">
@@ -1605,7 +1608,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRamCache</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.588">checkRamCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.583">checkRamCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 </li>
 </ul>
 <a name="evictBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-boolean-">
@@ -1614,7 +1617,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>evictBlock</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.597">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.592">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                           boolean&nbsp;deletedBlock)</pre>
 </li>
 </ul>
@@ -1624,7 +1627,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>logStats</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.662">logStats</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.657">logStats</a>()</pre>
 </li>
 </ul>
 <a name="getRealCacheSize--">
@@ -1633,7 +1636,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getRealCacheSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.688">getRealCacheSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.683">getRealCacheSize</a>()</pre>
 </li>
 </ul>
 <a name="acceptableSize--">
@@ -1642,7 +1645,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>acceptableSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.692">acceptableSize</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.687">acceptableSize</a>()</pre>
 </li>
 </ul>
 <a name="getPartitionSize-float-">
@@ -1651,7 +1654,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getPartitionSize</h4>
-<pre>long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.697">getPartitionSize</a>(float&nbsp;partitionFactor)</pre>
+<pre>long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.692">getPartitionSize</a>(float&nbsp;partitionFactor)</pre>
 </li>
 </ul>
 <a name="bucketSizesAboveThresholdCount-float-">
@@ -1660,7 +1663,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>bucketSizesAboveThresholdCount</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.704">bucketSizesAboveThresholdCount</a>(float&nbsp;minFactor)</pre>
+<pre>private&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.699">bucketSizesAboveThresholdCount</a>(float&nbsp;minFactor)</pre>
 <div class="block">Return the count of bucketSizeinfos still need free space</div>
 </li>
 </ul>
@@ -1670,7 +1673,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>freeEntireBuckets</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.726">freeEntireBuckets</a>(int&nbsp;completelyFreeBucketsNeeded)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.721">freeEntireBuckets</a>(int&nbsp;completelyFreeBucketsNeeded)</pre>
 <div class="block">This method will find the buckets that are minimally occupied
  and are not reference counted and will free them completely
  without any constraint on the access times of the elements,
@@ -1688,7 +1691,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>freeSpace</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.754">freeSpace</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.749">freeSpace</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block">Free the space if the used size reaches acceptableSize() or one size block
  couldn't be allocated. When freeing the space, we use the LRU algorithm and
  ensure there must be some blocks evicted</div>
@@ -1704,7 +1707,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getRAMQueueEntries</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1075">getRAMQueueEntries</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;q,
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1070">getRAMQueueEntries</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;q,
                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;receptacle)
                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Blocks until elements available in <code>q</code> then tries to grab as many as possible
@@ -1727,11 +1730,13 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>persistToFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1086">persistToFile</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1084">persistToFile</a>()
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#retrieveFromFile-int:A-"><code>retrieveFromFile(int[])</code></a></dd>
 </dl>
 </li>
 </ul>
@@ -1741,15 +1746,71 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>retrieveFromFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1108">retrieveFromFile</a>(int[]&nbsp;bucketSizes)
-                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
-                              <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocatorException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocatorException</a>,
-                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1098">retrieveFromFile</a>(int[]&nbsp;bucketSizes)
+                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#persistToFile--"><code>persistToFile()</code></a></dd>
+</dl>
+</li>
+</ul>
+<a name="deleteFileOnClose-java.io.File-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>deleteFileOnClose</h4>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/FileInputStream.html?is-external=true" title="class or interface in java.io">FileInputStream</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1139">deleteFileOnClose</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a>&nbsp;file)
+                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Create an input stream that deletes the file after reading it. Use in try-with-resources to
+ avoid this pattern where an exception thrown from a finally block may mask earlier exceptions:
+ <pre>
+   File f = ...
+   try (FileInputStream fis = new FileInputStream(f)) {
+     // use the input stream
+   } finally {
+     if (!f.delete()) throw new IOException("failed to delete");
+   }
+ </pre></div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>file</code> - the file to read and delete</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>a FileInputStream for the given file</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if there is a problem creating the stream</dd>
+</dl>
+</li>
+</ul>
+<a name="verifyCapacityAndClasses-long-java.lang.String-java.lang.String-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>verifyCapacityAndClasses</h4>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1151">verifyCapacityAndClasses</a>(long&nbsp;capacitySize,
+                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ioclass,
+                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;mapclass)
+                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
+</li>
+</ul>
+<a name="parsePB-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>parsePB</h4>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1168">parsePB</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BucketCacheEntry&nbsp;proto)
+              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
-<dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocatorException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocatorException</a></code></dd>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></code></dd>
 </dl>
 </li>
 </ul>
@@ -1759,7 +1820,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>checkIOErrorIsTolerated</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1160">checkIOErrorIsTolerated</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1178">checkIOErrorIsTolerated</a>()</pre>
 <div class="block">Check whether we tolerate IO error this time. If the duration of IOEngine
  throwing errors exceeds ioErrorsDurationTimeTolerated, we will disable the
  cache</div>
@@ -1771,7 +1832,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>disableCache</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1176">disableCache</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1194">disableCache</a>()</pre>
 <div class="block">Used to shut down the cache -or- turn it off in the case of something broken.</div>
 </li>
 </ul>
@@ -1781,7 +1842,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>join</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1189

<TRUNCATED>

[35/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/wal/WALFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/WALFactory.html b/devapidocs/org/apache/hadoop/hbase/wal/WALFactory.html
index 6f63a2a..7a02d08 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WALFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WALFactory.html
@@ -175,53 +175,49 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#DEFAULT_META_WAL_PROVIDER">DEFAULT_META_WAL_PROVIDER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#DEFAULT_WAL_PROVIDER">DEFAULT_WAL_PROVIDER</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#factoryId">factoryId</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractFSWALProvider.Reader.html" title="interface in org.apache.hadoop.hbase.wal">AbstractFSWALProvider.Reader</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#logReaderClass">logReaderClass</a></span></code>
 <div class="block">Configuration-specified WAL Reader used when a custom reader is requested</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#META_WAL_PROVIDER">META_WAL_PROVIDER</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#metaProvider">metaProvider</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#provider">provider</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#singleton">singleton</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#SINGLETON_ID">SINGLETON_ID</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#timeoutMillis">timeoutMillis</a></span></code>
 <div class="block">How long to attempt opening in-recovery wals</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#WAL_PROVIDER">WAL_PROVIDER</a></span></code>&nbsp;</td>
 </tr>
@@ -360,7 +356,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#getInstance-org.apache.hadoop.conf.Configuration-">getInstance</a></span>(org.apache.hadoop.conf.Configuration&nbsp;configuration)</code>&nbsp;</td>
 </tr>
 <tr id="i13" class="rowColor">
-<td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#getMetaProvider--">getMetaProvider</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i14" class="altColor">
@@ -456,22 +452,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
-<a name="DEFAULT_META_WAL_PROVIDER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>DEFAULT_META_WAL_PROVIDER</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.85">DEFAULT_META_WAL_PROVIDER</a></pre>
-</li>
-</ul>
 <a name="factoryId">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>factoryId</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.87">factoryId</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.86">factoryId</a></pre>
 </li>
 </ul>
 <a name="provider">
@@ -480,7 +467,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>provider</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.88">provider</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.87">provider</a></pre>
 </li>
 </ul>
 <a name="metaProvider">
@@ -489,7 +476,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>metaProvider</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.92">metaProvider</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.91">metaProvider</a></pre>
 </li>
 </ul>
 <a name="logReaderClass">
@@ -498,7 +485,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>logReaderClass</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractFSWALProvider.Reader.html" title="interface in org.apache.hadoop.hbase.wal">AbstractFSWALProvider.Reader</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.97">logReaderClass</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractFSWALProvider.Reader.html" title="interface in org.apache.hadoop.hbase.wal">AbstractFSWALProvider.Reader</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.96">logReaderClass</a></pre>
 <div class="block">Configuration-specified WAL Reader used when a custom reader is requested</div>
 </li>
 </ul>
@@ -508,7 +495,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>timeoutMillis</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.102">timeoutMillis</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.101">timeoutMillis</a></pre>
 <div class="block">How long to attempt opening in-recovery wals</div>
 </li>
 </ul>
@@ -518,7 +505,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.104">conf</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.103">conf</a></pre>
 </li>
 </ul>
 <a name="singleton">
@@ -527,7 +514,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>singleton</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.385">singleton</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.386">singleton</a></pre>
 </li>
 </ul>
 <a name="SINGLETON_ID">
@@ -536,7 +523,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SINGLETON_ID</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.386">SINGLETON_ID</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.387">SINGLETON_ID</a></pre>
 </li>
 </ul>
 </li>
@@ -553,7 +540,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>WALFactory</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.107">WALFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.106">WALFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="WALFactory-org.apache.hadoop.conf.Configuration-java.lang.String-">
@@ -562,7 +549,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>WALFactory</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.164">WALFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.163">WALFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;factoryId)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -582,7 +569,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WALFactory</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.178">WALFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.177">WALFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;factoryId,
                   boolean&nbsp;enableSyncReplicationWALProvider)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -613,7 +600,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getProviderClass</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.124">getProviderClass</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.123">getProviderClass</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;defaultValue)</pre>
 </li>
 </ul>
@@ -623,7 +610,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createProvider</h4>
-<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.147">createProvider</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt;&nbsp;clazz)
+<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.146">createProvider</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&gt;&nbsp;clazz)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -637,7 +624,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.210">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.209">close</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Shutdown all WALs and clean up any underlying storage.
  Use only when you will not need to replay and edits that have gone to any wals from this
@@ -654,7 +641,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.227">shutdown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.226">shutdown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Tell the underlying WAL providers to shut down, but do not clean up underlying storage.
  If you are not ending cleanly and will need to replay edits from this factory's wals,
@@ -671,7 +658,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getWALs</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.243">getWALs</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.242">getWALs</a>()</pre>
 </li>
 </ul>
 <a name="getMetaProvider--">
@@ -680,8 +667,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaProvider</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.247">getMetaProvider</a>()
-                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.247">getMetaProvider</a>()
+                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
@@ -694,7 +681,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getWAL</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.268">getWAL</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.269">getWAL</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -710,7 +697,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.278">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.279">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                org.apache.hadoop.fs.Path&nbsp;path)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -725,7 +712,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.289">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.290">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                org.apache.hadoop.fs.Path&nbsp;path,
                                <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -746,7 +733,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.294">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.295">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                org.apache.hadoop.fs.Path&nbsp;path,
                                <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter,
                                boolean&nbsp;allowCustom)
@@ -763,7 +750,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createWALWriter</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.366">createWALWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.367">createWALWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                           org.apache.hadoop.fs.Path&nbsp;path)
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create a writer for the WAL.
@@ -785,7 +772,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createRecoveredEditsWriter</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.376">createRecoveredEditsWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.377">createRecoveredEditsWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                      org.apache.hadoop.fs.Path&nbsp;path)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Should be package-private, visible for recovery testing.
@@ -804,7 +791,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getInstance</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.389">getInstance</a>(org.apache.hadoop.conf.Configuration&nbsp;configuration)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.390">getInstance</a>(org.apache.hadoop.conf.Configuration&nbsp;configuration)</pre>
 </li>
 </ul>
 <a name="createReader-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-">
@@ -813,7 +800,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.413">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.414">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                       org.apache.hadoop.fs.Path&nbsp;path,
                                       org.apache.hadoop.conf.Configuration&nbsp;configuration)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -833,7 +820,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.423">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.424">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                org.apache.hadoop.fs.Path&nbsp;path,
                                org.apache.hadoop.conf.Configuration&nbsp;configuration,
                                <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)
@@ -854,7 +841,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReaderIgnoreCustomClass</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.434">createReaderIgnoreCustomClass</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.435">createReaderIgnoreCustomClass</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                        org.apache.hadoop.fs.Path&nbsp;path,
                                                        org.apache.hadoop.conf.Configuration&nbsp;configuration)
                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -875,7 +862,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createRecoveredEditsWriter</h4>
-<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.444">createRecoveredEditsWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.445">createRecoveredEditsWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                      org.apache.hadoop.fs.Path&nbsp;path,
                                                      org.apache.hadoop.conf.Configuration&nbsp;configuration)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -895,7 +882,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createWALWriter</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.456">createWALWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.457">createWALWriter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                  org.apache.hadoop.fs.Path&nbsp;path,
                                                  org.apache.hadoop.conf.Configuration&nbsp;configuration)
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -915,7 +902,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getWALProvider</h4>
-<pre>public final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.462">getWALProvider</a>()</pre>
+<pre>public final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.463">getWALProvider</a>()</pre>
 </li>
 </ul>
 <a name="getMetaWALProvider--">
@@ -924,7 +911,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMetaWALProvider</h4>
-<pre>public final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.466">getMetaWALProvider</a>()</pre>
+<pre>public final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALFactory.html#line.467">getMetaWALProvider</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALProvider.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALProvider.html b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALProvider.html
index 3012b4f..c37a18b 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALProvider.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALProvider.html
@@ -276,7 +276,7 @@
 <td class="colLast"><span class="typeNameLabel">RegionGroupingProvider.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.html#createProvider-java.lang.String-">createProvider</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;group)</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
 <td class="colLast"><span class="typeNameLabel">WALFactory.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#getMetaProvider--">getMetaProvider</a></span>()</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -287,6 +287,10 @@
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
 <td class="colLast"><span class="typeNameLabel">WALFactory.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html#getWALProvider--">getWALProvider</a></span>()</code>&nbsp;</td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a></code></td>
+<td class="colLast"><span class="typeNameLabel">SyncReplicationWALProvider.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.html#getWrappedProvider--">getWrappedProvider</a></span>()</code>&nbsp;</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/overview-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index b9b3051..496cdb7 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -799,6 +799,7 @@
 </li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketCache.BucketEntryGroup</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketCache.RAMQueueEntry</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketProtoUtils</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallEvent.html" title="class in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallEvent</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="org/apache/hadoop/hbase/ipc/BufferChain.html" title="class in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferChain</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.encoding.<a href="org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html" title="class in org.apache.hadoop.hbase.io.encoding"><span class="typeNameLink">BufferedDataBlockEncoder.OnheapDecodedCell</span></a> (implements org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/ExtendedCell.html" title="interface in org.apache.hadoop.hbase">ExtendedCell</a>)</li>
@@ -1866,6 +1867,7 @@
 <li type="circle">org.apache.hadoop.hbase.backup.example.<a href="org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.html" title="class in org.apache.hadoop.hbase.backup.example"><span class="typeNameLink">HFileArchiveTableMonitor</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HFileArchiveUtil.html" title="class in org.apache.hadoop.hbase.util"><span class="typeNameLink">HFileArchiveUtil</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock</span></a> (implements org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>)</li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.BlockDeserializer</span></a> (implements org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.FSReaderImpl</span></a> (implements org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Header</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.PrefetchedHeader</span></a></li>
@@ -4114,7 +4116,6 @@
 </li>
 </ul>
 </li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">UniqueIndexMap</span></a>&lt;T&gt; (implements java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/UnsafeAccess.html" title="class in org.apache.hadoop.hbase.util"><span class="typeNameLink">UnsafeAccess</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/UnsafeAvailChecker.html" title="class in org.apache.hadoop.hbase.util"><span class="typeNameLink">UnsafeAvailChecker</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security"><span class="typeNameLink">User</span></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/serialized-form.html
----------------------------------------------------------------------
diff --git a/devapidocs/serialized-form.html b/devapidocs/serialized-form.html
index e0bd1b6..38f79a9 100644
--- a/devapidocs/serialized-form.html
+++ b/devapidocs/serialized-form.html
@@ -969,34 +969,6 @@
 </li>
 </ul>
 </li>
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap">
-<!--   -->
-</a>
-<h3>Class <a href="org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap</a> extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> implements Serializable</h3>
-<dl class="nameValue">
-<dt>serialVersionUID:</dt>
-<dd>-1145635738654002342L</dd>
-</dl>
-<ul class="blockList">
-<li class="blockList">
-<h3>Serialized Fields</h3>
-<ul class="blockList">
-<li class="blockList">
-<h4>mForwardMap</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">K</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">V</a>&gt; mForwardMap</pre>
-</li>
-<li class="blockList">
-<h4>mReverseMap</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">K</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">V</a>&gt; mReverseMap</pre>
-</li>
-<li class="blockListLast">
-<h4>mIndex</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> mIndex</pre>
-</li>
-</ul>
-</li>
-</ul>
-</li>
 <li class="blockList"><a name="org.apache.hadoop.hbase.io.hfile.bucket.UnsafeSharedMemoryBucketEntry">
 <!--   -->
 </a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 1b46255..f9ad79c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 <span class="sourceLineNo">008</span>@InterfaceAudience.Private<a name="line.8"></a>
 <span class="sourceLineNo">009</span>public class Version {<a name="line.9"></a>
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
-<span class="sourceLineNo">011</span>  public static final String revision = "323907f84fcb5ca2cb33131e212ccda4ace76c68";<a name="line.11"></a>
+<span class="sourceLineNo">011</span>  public static final String revision = "613d831429960348dc42c3bdb6ea5d31be15c81c";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Wed Aug  1 14:39:39 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Thu Aug  2 19:41:57 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "5b583e7044e19505b52ef0c80df82305";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "1bde06453e3a7ac1ff5e22617f911b02";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 


[03/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseCluster.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseCluster.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseCluster.html
index d51b5ba..e6811bb 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseCluster.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseCluster.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":10,"i5":10,"i6":6,"i7":10,"i8":6,"i9":10,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":10,"i16":10,"i17":10,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":10,"i28":6,"i29":10,"i30":6,"i31":6,"i32":6,"i33":10,"i34":10,"i35":6,"i36":6,"i37":6,"i38":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":10,"i5":10,"i6":6,"i7":10,"i8":6,"i9":10,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":10,"i17":10,"i18":10,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":10,"i31":6,"i32":10,"i33":6,"i34":6,"i35":6,"i36":10,"i37":6,"i38":6,"i39":10,"i40":6,"i41":6,"i42":6,"i43":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public abstract class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.58">HBaseCluster</a>
+public abstract class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.59">HBaseCluster</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, org.apache.hadoop.conf.Configurable</pre>
 <div class="block">This class defines methods that can help with managing HBase clusters
@@ -290,50 +290,57 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 </tr>
 <tr id="i13" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+</td>
+</tr>
+<tr id="i14" class="altColor">
+<td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killRegionServer-org.apache.hadoop.hbase.ServerName-">killRegionServer</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
  this causes the region server to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#killZkNode-org.apache.hadoop.hbase.ServerName-">killZkNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
 </td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#restoreClusterMetrics-org.apache.hadoop.hbase.ClusterMetrics-">restoreClusterMetrics</a></span>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;desiredStatus)</code>
 <div class="block">Restores the cluster to given state if this is a real cluster,
  otherwise does nothing.</div>
 </td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#restoreInitialStatus--">restoreInitialStatus</a></span>()</code>
 <div class="block">Restores the cluster to it's initial state if this is a real cluster,
  otherwise does nothing.</div>
 </td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#setConf-org.apache.hadoop.conf.Configuration-">setConf</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i19" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#shutdown--">shutdown</a></span>()</code>
 <div class="block">Shut down the HBase cluster</div>
 </td>
 </tr>
-<tr id="i19" class="rowColor">
+<tr id="i20" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startDataNode-org.apache.hadoop.hbase.ServerName-">startDataNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i20" class="altColor">
+<tr id="i21" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startMaster-java.lang.String-int-">startMaster</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -341,7 +348,14 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
  starts a master locally.</div>
 </td>
 </tr>
-<tr id="i21" class="rowColor">
+<tr id="i22" class="altColor">
+<td class="colFirst"><code>abstract void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+</td>
+</tr>
+<tr id="i23" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startRegionServer-java.lang.String-int-">startRegionServer</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                  int&nbsp;port)</code>
@@ -349,7 +363,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
  starts a region server locally.</div>
 </td>
 </tr>
-<tr id="i22" class="altColor">
+<tr id="i24" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#startZkNode-java.lang.String-int-">startZkNode</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
            int&nbsp;port)</code>
@@ -357,78 +371,98 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i23" class="rowColor">
+<tr id="i25" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopDataNode-org.apache.hadoop.hbase.ServerName-">stopDataNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i24" class="altColor">
+<tr id="i26" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopMaster-org.apache.hadoop.hbase.ServerName-">stopMaster</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i25" class="rowColor">
+<tr id="i27" class="rowColor">
+<td class="colFirst"><code>abstract void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+</td>
+</tr>
+<tr id="i28" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopRegionServer-org.apache.hadoop.hbase.ServerName-">stopRegionServer</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
 </td>
 </tr>
-<tr id="i26" class="altColor">
+<tr id="i29" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#stopZkNode-org.apache.hadoop.hbase.ServerName-">stopZkNode</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</code>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
  silently logs warning message.</div>
 </td>
 </tr>
-<tr id="i27" class="rowColor">
+<tr id="i30" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForActiveAndReadyMaster--">waitForActiveAndReadyMaster</a></span>()</code>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
 </td>
 </tr>
-<tr id="i28" class="altColor">
+<tr id="i31" class="rowColor">
 <td class="colFirst"><code>abstract boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForActiveAndReadyMaster-long-">waitForActiveAndReadyMaster</a></span>(long&nbsp;timeout)</code>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
 </td>
 </tr>
-<tr id="i29" class="rowColor">
+<tr id="i32" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForDatanodesRegistered-int-">waitForDatanodesRegistered</a></span>(int&nbsp;nbDN)</code>&nbsp;</td>
 </tr>
-<tr id="i30" class="altColor">
+<tr id="i33" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForDataNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                       long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to join the cluster</div>
 </td>
 </tr>
-<tr id="i31" class="rowColor">
+<tr id="i34" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForDataNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForDataNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                      long&nbsp;timeout)</code>
 <div class="block">Wait for the specified datanode to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i32" class="altColor">
+<tr id="i35" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">waitForMasterToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified master to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i33" class="rowColor">
+<tr id="i36" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNamenodeAvailable--">waitForNamenodeAvailable</a></span>()</code>
 <div class="block">Wait for the namenode.</div>
 </td>
 </tr>
-<tr id="i34" class="altColor">
+<tr id="i37" class="rowColor">
+<td class="colFirst"><code>abstract void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                      long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+</td>
+</tr>
+<tr id="i38" class="altColor">
+<td class="colFirst"><code>abstract void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                     long&nbsp;timeout)</code>
+<div class="block">Wait for the specified namenode to stop</div>
+</td>
+</tr>
+<tr id="i39" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForRegionServerToStart-java.lang.String-int-long-">waitForRegionServerToStart</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                           int&nbsp;port,
@@ -436,28 +470,28 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <div class="block">Wait for the specified region server to join the cluster</div>
 </td>
 </tr>
-<tr id="i35" class="rowColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-">waitForRegionServerToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                          long&nbsp;timeout)</code>
 <div class="block">Wait for the specified region server to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i36" class="altColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStart</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                     long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to join the cluster</div>
 </td>
 </tr>
-<tr id="i37" class="rowColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitForZkNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStop</a></span>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                    long&nbsp;timeout)</code>
 <div class="block">Wait for the specified zookeeper node to stop the thread / process.</div>
 </td>
 </tr>
-<tr id="i38" class="altColor">
+<tr id="i43" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/HBaseCluster.html#waitUntilShutDown--">waitUntilShutDown</a></span>()</code>
 <div class="block">Wait for HBase Cluster to shut down.</div>
@@ -491,7 +525,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>static final&nbsp;org.slf4j.Logger <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.60">LOG</a></pre>
+<pre>static final&nbsp;org.slf4j.Logger <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.61">LOG</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -500,7 +534,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>protected&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.61">conf</a></pre>
+<pre>protected&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.62">conf</a></pre>
 </li>
 </ul>
 <a name="initialClusterStatus">
@@ -509,7 +543,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>initialClusterStatus</h4>
-<pre>protected&nbsp;org.apache.hadoop.hbase.ClusterMetrics <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.64">initialClusterStatus</a></pre>
+<pre>protected&nbsp;org.apache.hadoop.hbase.ClusterMetrics <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.65">initialClusterStatus</a></pre>
 <div class="block">the status of the cluster before we begin</div>
 </li>
 </ul>
@@ -527,7 +561,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HBaseCluster</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.70">HBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.71">HBaseCluster</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Construct an HBaseCluster</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -549,7 +583,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setConf</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.75">setConf</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.76">setConf</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>setConf</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.conf.Configurable</code></dd>
@@ -562,7 +596,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getConf</h4>
-<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.80">getConf</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.81">getConf</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>getConf</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.conf.Configurable</code></dd>
@@ -575,7 +609,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterMetrics</h4>
-<pre>public abstract&nbsp;org.apache.hadoop.hbase.ClusterMetrics&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.88">getClusterMetrics</a>()
+<pre>public abstract&nbsp;org.apache.hadoop.hbase.ClusterMetrics&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.89">getClusterMetrics</a>()
                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns a ClusterMetrics for this HBase cluster.</div>
 <dl>
@@ -592,7 +626,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getInitialClusterMetrics</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.ClusterMetrics&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.94">getInitialClusterMetrics</a>()
+<pre>public&nbsp;org.apache.hadoop.hbase.ClusterMetrics&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.95">getInitialClusterMetrics</a>()
                                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns a ClusterStatus for this HBase cluster as observed at the
  starting of the HBaseCluster</div>
@@ -608,7 +642,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterAdminService</h4>
-<pre>public abstract&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.101">getMasterAdminService</a>()
+<pre>public abstract&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.102">getMasterAdminService</a>()
                                                                                                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns an <code>MasterService.BlockingInterface</code> to the active master</div>
 <dl>
@@ -623,7 +657,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getAdminProtocol</h4>
-<pre>public abstract&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.107">getAdminProtocol</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.108">getAdminProtocol</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                                                                                                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns an AdminProtocol interface to the regionserver</div>
 <dl>
@@ -638,7 +672,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getClientProtocol</h4>
-<pre>public abstract&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.113">getClientProtocol</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.114">getClientProtocol</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                                                                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns a ClientProtocol interface to the regionserver</div>
 <dl>
@@ -653,7 +687,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>startRegionServer</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.122">startRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.123">startRegionServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                        int&nbsp;port)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a new region server on the given hostname or if this is a mini/local cluster,
@@ -672,7 +706,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>killRegionServer</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.129">killRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.130">killRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Kills the region server process if this is a distributed cluster, otherwise
  this causes the region server to exit doing basic clean up only.</div>
@@ -688,7 +722,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>isKilledRS</h4>
-<pre>public abstract&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.138">isKilledRS</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
+<pre>public abstract&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.139">isKilledRS</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)</pre>
 <div class="block">Keeping track of killed servers and being able to check if a particular server was killed makes
  it possible to do fault tolerance testing for dead servers in a deterministic way. A concrete
  example of such case is - killing servers and waiting for all regions of a particular table
@@ -702,7 +736,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>stopRegionServer</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.145">stopRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.146">stopRegionServer</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Stops the given region server, by attempting a gradual stop.</div>
 <dl>
@@ -717,7 +751,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForRegionServerToStart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.152">waitForRegionServerToStart</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.153">waitForRegionServerToStart</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                        int&nbsp;port,
                                        long&nbsp;timeout)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -734,7 +768,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForRegionServerToStop</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.172">waitForRegionServerToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.173">waitForRegionServerToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                                long&nbsp;timeout)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for the specified region server to stop the thread / process.</div>
@@ -750,7 +784,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>startZkNode</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.181">startZkNode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.182">startZkNode</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                  int&nbsp;port)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a new zookeeper node on the given hostname or if this is a mini/local cluster,
@@ -769,7 +803,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>killZkNode</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.188">killZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.189">killZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Kills the zookeeper node process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
@@ -785,7 +819,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>stopZkNode</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.195">stopZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.196">stopZkNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Stops the region zookeeper if this is a distributed cluster, otherwise
  silently logs warning message.</div>
@@ -801,7 +835,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForZkNodeToStart</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.202">waitForZkNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.203">waitForZkNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                           long&nbsp;timeout)
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for the specified zookeeper node to join the cluster</div>
@@ -817,7 +851,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForZkNodeToStop</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.210">waitForZkNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.211">waitForZkNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                          long&nbsp;timeout)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for the specified zookeeper node to stop the thread / process.</div>
@@ -833,7 +867,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>startDataNode</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.218">startDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.219">startDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a new datanode on the given hostname or if this is a mini/local cluster,
  silently logs warning message.</div>
@@ -849,7 +883,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>killDataNode</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.225">killDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.226">killDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Kills the datanode process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
@@ -865,7 +899,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>stopDataNode</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.232">stopDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.233">stopDataNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Stops the datanode if this is a distributed cluster, otherwise
  silently logs warning message.</div>
@@ -881,7 +915,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForDataNodeToStart</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.239">waitForDataNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.240">waitForDataNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                             long&nbsp;timeout)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for the specified datanode to join the cluster</div>
@@ -897,7 +931,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForDataNodeToStop</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.247">waitForDataNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.248">waitForDataNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                            long&nbsp;timeout)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for the specified datanode to stop the thread / process.</div>
@@ -907,13 +941,92 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 </dl>
 </li>
 </ul>
+<a name="startNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>startNameNode</h4>
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.256">startNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Starts a new namenode on the given hostname or if this is a mini/local cluster, silently logs
+ warning message.</div>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="killNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>killNameNode</h4>
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.263">killNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Kills the namenode process if this is a distributed cluster, otherwise, this causes master to
+ exit doing basic clean up only.</div>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="stopNameNode-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>stopNameNode</h4>
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.269">stopNameNode</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Stops the namenode if this is a distributed cluster, otherwise silently logs warning message.</div>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStart</h4>
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.276">waitForNameNodeToStart</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                                            long&nbsp;timeout)
+                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Wait for the specified namenode to join the cluster</div>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
+<a name="waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>waitForNameNodeToStop</h4>
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.284">waitForNameNodeToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+                                           long&nbsp;timeout)
+                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Wait for the specified namenode to stop</div>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if something goes wrong or timeout occurs</dd>
+</dl>
+</li>
+</ul>
 <a name="startMaster-java.lang.String-int-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>startMaster</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.257">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.294">startMaster</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
                                  int&nbsp;port)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts a new master on the given hostname or if this is a mini/local cluster,
@@ -932,7 +1045,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>killMaster</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.264">killMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.301">killMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Kills the master process if this is a distributed cluster, otherwise,
  this causes master to exit doing basic clean up only.</div>
@@ -948,7 +1061,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.270">stopMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.307">stopMaster</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Stops the given master, by attempting a gradual stop.</div>
 <dl>
@@ -963,7 +1076,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForMasterToStop</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.276">waitForMasterToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.313">waitForMasterToStop</a>(org.apache.hadoop.hbase.ServerName&nbsp;serverName,
                                          long&nbsp;timeout)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for the specified master to stop the thread / process.</div>
@@ -979,7 +1092,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForActiveAndReadyMaster</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.287">waitForActiveAndReadyMaster</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.324">waitForActiveAndReadyMaster</a>()
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
@@ -998,7 +1111,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForActiveAndReadyMaster</h4>
-<pre>public abstract&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.299">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
+<pre>public abstract&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.336">waitForActiveAndReadyMaster</a>(long&nbsp;timeout)
                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Blocks until there is an active master and that master has completed
  initialization.</div>
@@ -1019,7 +1132,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitUntilShutDown</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.305">waitUntilShutDown</a>()
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.342">waitUntilShutDown</a>()
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for HBase Cluster to shut down.</div>
 <dl>
@@ -1034,7 +1147,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.310">shutdown</a>()
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.347">shutdown</a>()
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Shut down the HBase cluster</div>
 <dl>
@@ -1049,7 +1162,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreInitialStatus</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.319">restoreInitialStatus</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.356">restoreInitialStatus</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Restores the cluster to it's initial state if this is a real cluster,
  otherwise does nothing.
@@ -1069,7 +1182,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreClusterMetrics</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.330">restoreClusterMetrics</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;desiredStatus)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.367">restoreClusterMetrics</a>(org.apache.hadoop.hbase.ClusterMetrics&nbsp;desiredStatus)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Restores the cluster to given state if this is a real cluster,
  otherwise does nothing.
@@ -1089,7 +1202,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerHoldingMeta</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.337">getServerHoldingMeta</a>()
+<pre>public&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.374">getServerHoldingMeta</a>()
                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the ServerName of region server serving the first hbase:meta region</div>
 <dl>
@@ -1104,7 +1217,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerHoldingRegion</h4>
-<pre>public abstract&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.348">getServerHoldingRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tn,
+<pre>public abstract&nbsp;org.apache.hadoop.hbase.ServerName&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.385">getServerHoldingRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tn,
                                                                           byte[]&nbsp;regionName)
                                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the ServerName of region server serving the specified region</div>
@@ -1125,7 +1238,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>isDistributedCluster</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.355">isDistributedCluster</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.392">isDistributedCluster</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>whether we are interacting with a distributed cluster as opposed to an
@@ -1139,7 +1252,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.365">close</a>()
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.402">close</a>()
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Closes all the resources held open for this cluster. Note that this call does not shutdown
  the cluster.</div>
@@ -1161,7 +1274,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForNamenodeAvailable</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.372">waitForNamenodeAvailable</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.409">waitForNamenodeAvailable</a>()
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Wait for the namenode.</div>
 <dl>
@@ -1176,7 +1289,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>waitForDatanodesRegistered</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.375">waitForDatanodesRegistered</a>(int&nbsp;nbDN)
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseCluster.html#line.412">waitForDatanodesRegistered</a>(int&nbsp;nbDN)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html
index 25b9c47..105cfcb 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static enum <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.168">HBaseClusterManager.CommandProvider.Operation</a>
+<pre>static enum <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.169">HBaseClusterManager.CommandProvider.Operation</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&gt;</pre>
 </li>
 </ul>
@@ -213,7 +213,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>START</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.169">START</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.170">START</a></pre>
 </li>
 </ul>
 <a name="STOP">
@@ -222,7 +222,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>STOP</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.169">STOP</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.170">STOP</a></pre>
 </li>
 </ul>
 <a name="RESTART">
@@ -231,7 +231,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RESTART</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.169">RESTART</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.170">RESTART</a></pre>
 </li>
 </ul>
 </li>
@@ -248,7 +248,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.168">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.169">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -268,7 +268,7 @@ for (HBaseClusterManager.CommandProvider.Operation c : HBaseClusterManager.Comma
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.168">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html#line.169">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html
index 539e289..f568efb 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>abstract static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.166">HBaseClusterManager.CommandProvider</a>
+<pre>abstract static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.167">HBaseClusterManager.CommandProvider</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Provides command strings for services to be executed by Shell. CommandProviders are
  pluggable, and different deployments(windows, bigtop, etc) can be managed by
@@ -222,7 +222,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CommandProvider</h4>
-<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.166">CommandProvider</a>()</pre>
+<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.167">CommandProvider</a>()</pre>
 </li>
 </ul>
 </li>
@@ -239,7 +239,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getCommand</h4>
-<pre>public abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.172">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.173">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                                   <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;op)</pre>
 </li>
 </ul>
@@ -249,7 +249,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isRunningCommand</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.174">isRunningCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.175">isRunningCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)</pre>
 </li>
 </ul>
 <a name="findPidCommand-org.apache.hadoop.hbase.ClusterManager.ServiceType-">
@@ -258,7 +258,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>findPidCommand</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.178">findPidCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.179">findPidCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service)</pre>
 </li>
 </ul>
 <a name="signalCommand-org.apache.hadoop.hbase.ClusterManager.ServiceType-java.lang.String-">
@@ -267,7 +267,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>signalCommand</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.183">signalCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html#line.184">signalCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;signal)</pre>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html
index 1206c64..ef4311f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.191">HBaseClusterManager.HBaseShellCommandProvider</a>
+<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.192">HBaseClusterManager.HBaseShellCommandProvider</a>
 extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html" title="class in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider</a></pre>
 <div class="block">CommandProvider to manage the service using bin/hbase-* scripts</div>
 </li>
@@ -234,7 +234,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockList">
 <li class="blockList">
 <h4>hbaseHome</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.192">hbaseHome</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.193">hbaseHome</a></pre>
 </li>
 </ul>
 <a name="confDir">
@@ -243,7 +243,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>confDir</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.193">confDir</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.194">confDir</a></pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HBaseShellCommandProvider</h4>
-<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.195">HBaseShellCommandProvider</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.196">HBaseShellCommandProvider</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 </li>
@@ -277,7 +277,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getCommand</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.208">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HBaseShellCommandProvider.html#line.209">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                          <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;op)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html
index af02135..1168efa 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.217">HBaseClusterManager.HadoopShellCommandProvider</a>
+<pre>static class <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.html#line.218">HBaseClusterManager.HadoopShellCommandProvider</a>
 extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.html" title="class in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider</a></pre>
 <div class="block">CommandProvider to manage the service using sbin/hadoop-* scripts.</div>
 </li>
@@ -234,7 +234,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockList">
 <li class="blockList">
 <h4>hadoopHome</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.218">hadoopHome</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.219">hadoopHome</a></pre>
 </li>
 </ul>
 <a name="confDir">
@@ -243,7 +243,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>confDir</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.219">confDir</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.220">confDir</a></pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HadoopShellCommandProvider</h4>
-<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.221">HadoopShellCommandProvider</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre><a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.222">HadoopShellCommandProvider</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -282,7 +282,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Command
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getCommand</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.238">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HBaseClusterManager.HadoopShellCommandProvider.html#line.239">getCommand</a>(<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase">ClusterManager.ServiceType</a>&nbsp;service,
                          <a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase">HBaseClusterManager.CommandProvider.Operation</a>&nbsp;op)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>


[13/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html
new file mode 100644
index 0000000..80852ec
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html
@@ -0,0 +1,263 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html lang="en">
+<head>
+<title>Source code</title>
+<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
+</head>
+<body>
+<div class="sourceContainer">
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
+<span class="sourceLineNo">002</span> * Copyright The Apache Software Foundation<a name="line.2"></a>
+<span class="sourceLineNo">003</span> *<a name="line.3"></a>
+<span class="sourceLineNo">004</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.4"></a>
+<span class="sourceLineNo">005</span> * or more contributor license agreements.  See the NOTICE file<a name="line.5"></a>
+<span class="sourceLineNo">006</span> * distributed with this work for additional information<a name="line.6"></a>
+<span class="sourceLineNo">007</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.7"></a>
+<span class="sourceLineNo">008</span> * to you under the Apache License, Version 2.0 (the<a name="line.8"></a>
+<span class="sourceLineNo">009</span> * "License"); you may not use this file except in compliance<a name="line.9"></a>
+<span class="sourceLineNo">010</span> * with the License.  You may obtain a copy of the License at<a name="line.10"></a>
+<span class="sourceLineNo">011</span> *<a name="line.11"></a>
+<span class="sourceLineNo">012</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.12"></a>
+<span class="sourceLineNo">013</span> *<a name="line.13"></a>
+<span class="sourceLineNo">014</span> * Unless required by applicable law or agreed to in writing, software<a name="line.14"></a>
+<span class="sourceLineNo">015</span><a name="line.15"></a>
+<span class="sourceLineNo">016</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.16"></a>
+<span class="sourceLineNo">017</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.17"></a>
+<span class="sourceLineNo">018</span> * See the License for the specific language governing permissions and<a name="line.18"></a>
+<span class="sourceLineNo">019</span> * limitations under the License.<a name="line.19"></a>
+<span class="sourceLineNo">020</span> */<a name="line.20"></a>
+<span class="sourceLineNo">021</span>package org.apache.hadoop.hbase.io.hfile.bucket;<a name="line.21"></a>
+<span class="sourceLineNo">022</span><a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.util.Map;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.25"></a>
+<span class="sourceLineNo">026</span><a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.32"></a>
+<span class="sourceLineNo">033</span><a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.34"></a>
+<span class="sourceLineNo">035</span><a name="line.35"></a>
+<span class="sourceLineNo">036</span>@InterfaceAudience.Private<a name="line.36"></a>
+<span class="sourceLineNo">037</span>final class BucketProtoUtils {<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  private BucketProtoUtils() {<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>  }<a name="line.40"></a>
+<span class="sourceLineNo">041</span><a name="line.41"></a>
+<span class="sourceLineNo">042</span>  static BucketCacheProtos.BucketCacheEntry toPB(BucketCache cache) {<a name="line.42"></a>
+<span class="sourceLineNo">043</span>    return BucketCacheProtos.BucketCacheEntry.newBuilder()<a name="line.43"></a>
+<span class="sourceLineNo">044</span>        .setCacheCapacity(cache.getMaxSize())<a name="line.44"></a>
+<span class="sourceLineNo">045</span>        .setIoClass(cache.ioEngine.getClass().getName())<a name="line.45"></a>
+<span class="sourceLineNo">046</span>        .setMapClass(cache.backingMap.getClass().getName())<a name="line.46"></a>
+<span class="sourceLineNo">047</span>        .putAllDeserializers(CacheableDeserializerIdManager.save())<a name="line.47"></a>
+<span class="sourceLineNo">048</span>        .setBackingMap(BucketProtoUtils.toPB(cache.backingMap))<a name="line.48"></a>
+<span class="sourceLineNo">049</span>        .build();<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  }<a name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>  private static BucketCacheProtos.BackingMap toPB(<a name="line.52"></a>
+<span class="sourceLineNo">053</span>      Map&lt;BlockCacheKey, BucketCache.BucketEntry&gt; backingMap) {<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    BucketCacheProtos.BackingMap.Builder builder = BucketCacheProtos.BackingMap.newBuilder();<a name="line.54"></a>
+<span class="sourceLineNo">055</span>    for (Map.Entry&lt;BlockCacheKey, BucketCache.BucketEntry&gt; entry : backingMap.entrySet()) {<a name="line.55"></a>
+<span class="sourceLineNo">056</span>      builder.addEntry(BucketCacheProtos.BackingMapEntry.newBuilder()<a name="line.56"></a>
+<span class="sourceLineNo">057</span>          .setKey(toPB(entry.getKey()))<a name="line.57"></a>
+<span class="sourceLineNo">058</span>          .setValue(toPB(entry.getValue()))<a name="line.58"></a>
+<span class="sourceLineNo">059</span>          .build());<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    }<a name="line.60"></a>
+<span class="sourceLineNo">061</span>    return builder.build();<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
+<span class="sourceLineNo">063</span><a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static BucketCacheProtos.BlockCacheKey toPB(BlockCacheKey key) {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    return BucketCacheProtos.BlockCacheKey.newBuilder()<a name="line.65"></a>
+<span class="sourceLineNo">066</span>        .setHfilename(key.getHfileName())<a name="line.66"></a>
+<span class="sourceLineNo">067</span>        .setOffset(key.getOffset())<a name="line.67"></a>
+<span class="sourceLineNo">068</span>        .setPrimaryReplicaBlock(key.isPrimary())<a name="line.68"></a>
+<span class="sourceLineNo">069</span>        .setBlockType(toPB(key.getBlockType()))<a name="line.69"></a>
+<span class="sourceLineNo">070</span>        .build();<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  }<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  private static BucketCacheProtos.BlockType toPB(BlockType blockType) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    switch(blockType) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      case DATA:<a name="line.75"></a>
+<span class="sourceLineNo">076</span>        return BucketCacheProtos.BlockType.data;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      case META:<a name="line.77"></a>
+<span class="sourceLineNo">078</span>        return BucketCacheProtos.BlockType.meta;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      case TRAILER:<a name="line.79"></a>
+<span class="sourceLineNo">080</span>        return BucketCacheProtos.BlockType.trailer;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      case INDEX_V1:<a name="line.81"></a>
+<span class="sourceLineNo">082</span>        return BucketCacheProtos.BlockType.index_v1;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>      case FILE_INFO:<a name="line.83"></a>
+<span class="sourceLineNo">084</span>        return BucketCacheProtos.BlockType.file_info;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      case LEAF_INDEX:<a name="line.85"></a>
+<span class="sourceLineNo">086</span>        return BucketCacheProtos.BlockType.leaf_index;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      case ROOT_INDEX:<a name="line.87"></a>
+<span class="sourceLineNo">088</span>        return BucketCacheProtos.BlockType.root_index;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      case BLOOM_CHUNK:<a name="line.89"></a>
+<span class="sourceLineNo">090</span>        return BucketCacheProtos.BlockType.bloom_chunk;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      case ENCODED_DATA:<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        return BucketCacheProtos.BlockType.encoded_data;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>      case GENERAL_BLOOM_META:<a name="line.93"></a>
+<span class="sourceLineNo">094</span>        return BucketCacheProtos.BlockType.general_bloom_meta;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>      case INTERMEDIATE_INDEX:<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        return BucketCacheProtos.BlockType.intermediate_index;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      case DELETE_FAMILY_BLOOM_META:<a name="line.97"></a>
+<span class="sourceLineNo">098</span>        return BucketCacheProtos.BlockType.delete_family_bloom_meta;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>      default:<a name="line.99"></a>
+<span class="sourceLineNo">100</span>        throw new Error("Unrecognized BlockType.");<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    }<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  }<a name="line.102"></a>
+<span class="sourceLineNo">103</span><a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static BucketCacheProtos.BucketEntry toPB(BucketCache.BucketEntry entry) {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    return BucketCacheProtos.BucketEntry.newBuilder()<a name="line.105"></a>
+<span class="sourceLineNo">106</span>        .setOffset(entry.offset())<a name="line.106"></a>
+<span class="sourceLineNo">107</span>        .setLength(entry.getLength())<a name="line.107"></a>
+<span class="sourceLineNo">108</span>        .setDeserialiserIndex(entry.deserialiserIndex)<a name="line.108"></a>
+<span class="sourceLineNo">109</span>        .setAccessCounter(entry.getAccessCounter())<a name="line.109"></a>
+<span class="sourceLineNo">110</span>        .setPriority(toPB(entry.getPriority()))<a name="line.110"></a>
+<span class="sourceLineNo">111</span>        .build();<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  }<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static BucketCacheProtos.BlockPriority toPB(BlockPriority p) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    switch (p) {<a name="line.115"></a>
+<span class="sourceLineNo">116</span>      case MULTI:<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        return BucketCacheProtos.BlockPriority.multi;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      case MEMORY:<a name="line.118"></a>
+<span class="sourceLineNo">119</span>        return BucketCacheProtos.BlockPriority.memory;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>      case SINGLE:<a name="line.120"></a>
+<span class="sourceLineNo">121</span>        return BucketCacheProtos.BlockPriority.single;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      default:<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        throw new Error("Unrecognized BlockPriority.");<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  static ConcurrentHashMap&lt;BlockCacheKey, BucketCache.BucketEntry&gt; fromPB(<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      Map&lt;Integer, String&gt; deserializers, BucketCacheProtos.BackingMap backingMap)<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      throws IOException {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    ConcurrentHashMap&lt;BlockCacheKey, BucketCache.BucketEntry&gt; result = new ConcurrentHashMap&lt;&gt;();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    for (BucketCacheProtos.BackingMapEntry entry : backingMap.getEntryList()) {<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      BucketCacheProtos.BlockCacheKey protoKey = entry.getKey();<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      BlockCacheKey key = new BlockCacheKey(protoKey.getHfilename(), protoKey.getOffset(),<a name="line.133"></a>
+<span class="sourceLineNo">134</span>          protoKey.getPrimaryReplicaBlock(), fromPb(protoKey.getBlockType()));<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      BucketCacheProtos.BucketEntry protoValue = entry.getValue();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      BucketCache.BucketEntry value = new BucketCache.BucketEntry(<a name="line.136"></a>
+<span class="sourceLineNo">137</span>          protoValue.getOffset(),<a name="line.137"></a>
+<span class="sourceLineNo">138</span>          protoValue.getLength(),<a name="line.138"></a>
+<span class="sourceLineNo">139</span>          protoValue.getAccessCounter(),<a name="line.139"></a>
+<span class="sourceLineNo">140</span>          protoValue.getPriority() == BucketCacheProtos.BlockPriority.memory);<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // This is the deserializer that we stored<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      int oldIndex = protoValue.getDeserialiserIndex();<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      String deserializerClass = deserializers.get(oldIndex);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>      if (deserializerClass == null) {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        throw new IOException("Found deserializer index without matching entry.");<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      // Convert it to the identifier for the deserializer that we have in this runtime<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      if (deserializerClass.equals(HFileBlock.BlockDeserializer.class.getName())) {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>        int actualIndex = HFileBlock.BLOCK_DESERIALIZER.getDeserialiserIdentifier();<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        value.deserialiserIndex = (byte) actualIndex;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      } else {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        // We could make this more plugable, but right now HFileBlock is the only implementation<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        // of Cacheable outside of tests, so this might not ever matter.<a name="line.153"></a>
+<span class="sourceLineNo">154</span>        throw new IOException("Unknown deserializer class found: " + deserializerClass);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      result.put(key, value);<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    return result;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static BlockType fromPb(BucketCacheProtos.BlockType blockType) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    switch (blockType) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      case data:<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        return BlockType.DATA;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      case meta:<a name="line.165"></a>
+<span class="sourceLineNo">166</span>        return BlockType.META;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      case trailer:<a name="line.167"></a>
+<span class="sourceLineNo">168</span>        return BlockType.TRAILER;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      case index_v1:<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        return BlockType.INDEX_V1;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      case file_info:<a name="line.171"></a>
+<span class="sourceLineNo">172</span>        return BlockType.FILE_INFO;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      case leaf_index:<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        return BlockType.LEAF_INDEX;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      case root_index:<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        return BlockType.ROOT_INDEX;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>      case bloom_chunk:<a name="line.177"></a>
+<span class="sourceLineNo">178</span>        return BlockType.BLOOM_CHUNK;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      case encoded_data:<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        return BlockType.ENCODED_DATA;<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      case general_bloom_meta:<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        return BlockType.GENERAL_BLOOM_META;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      case intermediate_index:<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        return BlockType.INTERMEDIATE_INDEX;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      case delete_family_bloom_meta:<a name="line.185"></a>
+<span class="sourceLineNo">186</span>        return BlockType.DELETE_FAMILY_BLOOM_META;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      default:<a name="line.187"></a>
+<span class="sourceLineNo">188</span>        throw new Error("Unrecognized BlockType.");<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    }<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  }<a name="line.190"></a>
+<span class="sourceLineNo">191</span>}<a name="line.191"></a>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+</pre>
+</div>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html
deleted file mode 100644
index e17186e..0000000
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html
+++ /dev/null
@@ -1,128 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<html lang="en">
-<head>
-<title>Source code</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
-</head>
-<body>
-<div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
-<span class="sourceLineNo">002</span> * Copyright The Apache Software Foundation<a name="line.2"></a>
-<span class="sourceLineNo">003</span> *<a name="line.3"></a>
-<span class="sourceLineNo">004</span> * Licensed to the Apache Software Foundation (ASF) under one or more<a name="line.4"></a>
-<span class="sourceLineNo">005</span> * contributor license agreements. See the NOTICE file distributed with this<a name="line.5"></a>
-<span class="sourceLineNo">006</span> * work for additional information regarding copyright ownership. The ASF<a name="line.6"></a>
-<span class="sourceLineNo">007</span> * licenses this file to you under the Apache License, Version 2.0 (the<a name="line.7"></a>
-<span class="sourceLineNo">008</span> * "License"); you may not use this file except in compliance with the License.<a name="line.8"></a>
-<span class="sourceLineNo">009</span> * You may obtain a copy of the License at<a name="line.9"></a>
-<span class="sourceLineNo">010</span> *<a name="line.10"></a>
-<span class="sourceLineNo">011</span> * http://www.apache.org/licenses/LICENSE-2.0<a name="line.11"></a>
-<span class="sourceLineNo">012</span> *<a name="line.12"></a>
-<span class="sourceLineNo">013</span> * Unless required by applicable law or agreed to in writing, software<a name="line.13"></a>
-<span class="sourceLineNo">014</span> * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT<a name="line.14"></a>
-<span class="sourceLineNo">015</span> * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the<a name="line.15"></a>
-<span class="sourceLineNo">016</span> * License for the specific language governing permissions and limitations<a name="line.16"></a>
-<span class="sourceLineNo">017</span> * under the License.<a name="line.17"></a>
-<span class="sourceLineNo">018</span> */<a name="line.18"></a>
-<span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.io.hfile.bucket;<a name="line.19"></a>
-<span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import java.io.Serializable;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.25"></a>
-<span class="sourceLineNo">026</span><a name="line.26"></a>
-<span class="sourceLineNo">027</span>/**<a name="line.27"></a>
-<span class="sourceLineNo">028</span> * Map from type T to int and vice-versa. Used for reducing bit field item<a name="line.28"></a>
-<span class="sourceLineNo">029</span> * counts.<a name="line.29"></a>
-<span class="sourceLineNo">030</span> */<a name="line.30"></a>
-<span class="sourceLineNo">031</span>@InterfaceAudience.Private<a name="line.31"></a>
-<span class="sourceLineNo">032</span>public final class UniqueIndexMap&lt;T&gt; implements Serializable {<a name="line.32"></a>
-<span class="sourceLineNo">033</span>  private static final long serialVersionUID = -1145635738654002342L;<a name="line.33"></a>
-<span class="sourceLineNo">034</span><a name="line.34"></a>
-<span class="sourceLineNo">035</span>  ConcurrentHashMap&lt;T, Integer&gt; mForwardMap = new ConcurrentHashMap&lt;&gt;();<a name="line.35"></a>
-<span class="sourceLineNo">036</span>  ConcurrentHashMap&lt;Integer, T&gt; mReverseMap = new ConcurrentHashMap&lt;&gt;();<a name="line.36"></a>
-<span class="sourceLineNo">037</span>  AtomicInteger mIndex = new AtomicInteger(0);<a name="line.37"></a>
-<span class="sourceLineNo">038</span><a name="line.38"></a>
-<span class="sourceLineNo">039</span>  // Map a length to an index. If we can't, allocate a new mapping. We might<a name="line.39"></a>
-<span class="sourceLineNo">040</span>  // race here and get two entries with the same deserialiser. This is fine.<a name="line.40"></a>
-<span class="sourceLineNo">041</span>  int map(T parameter) {<a name="line.41"></a>
-<span class="sourceLineNo">042</span>    Integer ret = mForwardMap.get(parameter);<a name="line.42"></a>
-<span class="sourceLineNo">043</span>    if (ret != null) return ret.intValue();<a name="line.43"></a>
-<span class="sourceLineNo">044</span>    int nexti = mIndex.incrementAndGet();<a name="line.44"></a>
-<span class="sourceLineNo">045</span>    assert (nexti &lt; Short.MAX_VALUE);<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    mForwardMap.put(parameter, nexti);<a name="line.46"></a>
-<span class="sourceLineNo">047</span>    mReverseMap.put(nexti, parameter);<a name="line.47"></a>
-<span class="sourceLineNo">048</span>    return nexti;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  }<a name="line.49"></a>
-<span class="sourceLineNo">050</span><a name="line.50"></a>
-<span class="sourceLineNo">051</span>  T unmap(int leni) {<a name="line.51"></a>
-<span class="sourceLineNo">052</span>    Integer len = Integer.valueOf(leni);<a name="line.52"></a>
-<span class="sourceLineNo">053</span>    assert mReverseMap.containsKey(len);<a name="line.53"></a>
-<span class="sourceLineNo">054</span>    return mReverseMap.get(len);<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
-<span class="sourceLineNo">056</span>}<a name="line.56"></a>
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-</pre>
-</div>
-</body>
-</html>


[19/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlock;<a name="line.522"></a>
+<span c

<TRUNCATED>

[05/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html
----------------------------------------------------------------------
diff --git a/testapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html b/testapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html
index af2e5b1..fe2a7c8 100644
--- a/testapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html
+++ b/testapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.html
@@ -32,889 +32,915 @@
 <span class="sourceLineNo">024</span>import java.util.HashSet;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.List;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.Set;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.fs.FileSystem;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.regionserver.HRegionServer;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.regionserver.Region;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.security.User;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.test.MetricsAssertHelper;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.JVMClusterUtil;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.slf4j.Logger;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.slf4j.LoggerFactory;<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;<a name="line.47"></a>
-<span class="sourceLineNo">048</span><a name="line.48"></a>
-<span class="sourceLineNo">049</span>/**<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * This class creates a single process HBase cluster.<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * each server.  The master uses the 'default' FileSystem.  The RegionServers,<a name="line.51"></a>
-<span class="sourceLineNo">052</span> * if we are running on DistributedFilesystem, create a FileSystem instance<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * each and will close down their instance on the way out.<a name="line.53"></a>
-<span class="sourceLineNo">054</span> */<a name="line.54"></a>
-<span class="sourceLineNo">055</span>@InterfaceAudience.Public<a name="line.55"></a>
-<span class="sourceLineNo">056</span>public class MiniHBaseCluster extends HBaseCluster {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private static final Logger LOG = LoggerFactory.getLogger(MiniHBaseCluster.class.getName());<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  public LocalHBaseCluster hbaseCluster;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  private static int index;<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>  /**<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * Start a MiniHBaseCluster.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * @param conf Configuration to be used for cluster<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @param numRegionServers initial number of region servers to start.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @throws IOException<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public MiniHBaseCluster(Configuration conf, int numRegionServers)<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  throws IOException, InterruptedException {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this(conf, 1, numRegionServers);<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  }<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  /**<a name="line.72"></a>
-<span class="sourceLineNo">073</span>   * Start a MiniHBaseCluster.<a name="line.73"></a>
-<span class="sourceLineNo">074</span>   * @param conf Configuration to be used for cluster<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * @param numMasters initial number of masters to start.<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   * @param numRegionServers initial number of region servers to start.<a name="line.76"></a>
-<span class="sourceLineNo">077</span>   * @throws IOException<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   */<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers)<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      throws IOException, InterruptedException {<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    this(conf, numMasters, numRegionServers, null, null);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * Start a MiniHBaseCluster.<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   * @param conf Configuration to be used for cluster<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * @param numMasters initial number of masters to start.<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   * @param numRegionServers initial number of region servers to start.<a name="line.88"></a>
-<span class="sourceLineNo">089</span>   */<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>         Class&lt;? extends HMaster&gt; masterClass,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>         Class&lt;? extends MiniHBaseCluster.MiniHBaseClusterRegionServer&gt; regionserverClass)<a name="line.92"></a>
-<span class="sourceLineNo">093</span>      throws IOException, InterruptedException {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    this(conf, numMasters, numRegionServers, null, masterClass, regionserverClass);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  /**<a name="line.97"></a>
-<span class="sourceLineNo">098</span>   * @param rsPorts Ports that RegionServer should use; pass ports if you want to test cluster<a name="line.98"></a>
-<span class="sourceLineNo">099</span>   *   restart where for sure the regionservers come up on same address+port (but<a name="line.99"></a>
-<span class="sourceLineNo">100</span>   *   just with different startcode); by default mini hbase clusters choose new<a name="line.100"></a>
-<span class="sourceLineNo">101</span>   *   arbitrary ports on each cluster start.<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * @throws IOException<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   * @throws InterruptedException<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   */<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers,<a name="line.105"></a>
-<span class="sourceLineNo">106</span>         List&lt;Integer&gt; rsPorts,<a name="line.106"></a>
-<span class="sourceLineNo">107</span>         Class&lt;? extends HMaster&gt; masterClass,<a name="line.107"></a>
-<span class="sourceLineNo">108</span>         Class&lt;? extends MiniHBaseCluster.MiniHBaseClusterRegionServer&gt; regionserverClass)<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      throws IOException, InterruptedException {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    super(conf);<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>    // Hadoop 2<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    CompatibilityFactory.getInstance(MetricsAssertHelper.class).init();<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>    init(numMasters, numRegionServers, rsPorts, masterClass, regionserverClass);<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    this.initialClusterStatus = getClusterStatus();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  public Configuration getConfiguration() {<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    return this.conf;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>  /**<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * Subclass so can get at protected methods (none at moment).  Also, creates<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * a FileSystem instance per instantiation.  Adds a shutdown own FileSystem<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * on the way out. Shuts down own Filesystem only, not All filesystems as<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   * the FileSystem system exit hook does.<a name="line.127"></a>
-<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  public static class MiniHBaseClusterRegionServer extends HRegionServer {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private Thread shutdownThread = null;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private User user = null;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    /**<a name="line.132"></a>
-<span class="sourceLineNo">133</span>     * List of RegionServers killed so far. ServerName also comprises startCode of a server,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>     * so any restarted instances of the same server will have different ServerName and will not<a name="line.134"></a>
-<span class="sourceLineNo">135</span>     * coincide with past dead ones. So there's no need to cleanup this list.<a name="line.135"></a>
-<span class="sourceLineNo">136</span>     */<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static Set&lt;ServerName&gt; killedServers = new HashSet&lt;&gt;();<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>    public MiniHBaseClusterRegionServer(Configuration conf)<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        throws IOException, InterruptedException {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      super(conf);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      this.user = User.getCurrent();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>    /*<a name="line.145"></a>
-<span class="sourceLineNo">146</span>     * @param c<a name="line.146"></a>
-<span class="sourceLineNo">147</span>     * @param currentfs We return this if we did not make a new one.<a name="line.147"></a>
-<span class="sourceLineNo">148</span>     * @param uniqueName Same name used to help identify the created fs.<a name="line.148"></a>
-<span class="sourceLineNo">149</span>     * @return A new fs instance if we are up on DistributeFileSystem.<a name="line.149"></a>
-<span class="sourceLineNo">150</span>     * @throws IOException<a name="line.150"></a>
-<span class="sourceLineNo">151</span>     */<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>    @Override<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    protected void handleReportForDutyResponse(<a name="line.154"></a>
-<span class="sourceLineNo">155</span>        final RegionServerStartupResponse c) throws IOException {<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      super.handleReportForDutyResponse(c);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      // Run this thread to shutdown our filesystem on way out.<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      this.shutdownThread = new SingleFileSystemShutdownThread(getFileSystem());<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    @Override<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    public void run() {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      try {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        this.user.runAs(new PrivilegedAction&lt;Object&gt;() {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>          @Override<a name="line.165"></a>
-<span class="sourceLineNo">166</span>          public Object run() {<a name="line.166"></a>
-<span class="sourceLineNo">167</span>            runRegionServer();<a name="line.167"></a>
-<span class="sourceLineNo">168</span>            return null;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>          }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        });<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      } catch (Throwable t) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        LOG.error("Exception in run", t);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      } finally {<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        // Run this on the way out.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>        if (this.shutdownThread != null) {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>          this.shutdownThread.start();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>          Threads.shutdown(this.shutdownThread, 30000);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>        }<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      }<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    }<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>    private void runRegionServer() {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>      super.run();<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    }<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>    @Override<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    protected void kill() {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      killedServers.add(getServerName());<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      super.kill();<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    }<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    @Override<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    public void abort(final String reason, final Throwable cause) {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      this.user.runAs(new PrivilegedAction&lt;Object&gt;() {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        @Override<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        public Object run() {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>          abortRegionServer(reason, cause);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>          return null;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>        }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      });<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span><a name="line.202"></a>
-<span class="sourceLineNo">203</span>    private void abortRegionServer(String reason, Throwable cause) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      super.abort(reason, cause);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  }<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * Alternate shutdown hook.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   * Just shuts down the passed fs, not all as default filesystem hook does.<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   */<a name="line.211"></a>
-<span class="sourceLineNo">212</span>  static class SingleFileSystemShutdownThread extends Thread {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    private final FileSystem fs;<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    SingleFileSystemShutdownThread(final FileSystem fs) {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      super("Shutdown of " + fs);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      this.fs = fs;<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    @Override<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    public void run() {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      try {<a name="line.220"></a>
-<span class="sourceLineNo">221</span>        LOG.info("Hook closing fs=" + this.fs);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        this.fs.close();<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      } catch (NullPointerException npe) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        LOG.debug("Need to fix these: " + npe.toString());<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      } catch (IOException e) {<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        LOG.warn("Running hook", e);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      }<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  }<a name="line.229"></a>
-<span class="sourceLineNo">230</span><a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private void init(final int nMasterNodes, final int nRegionNodes, List&lt;Integer&gt; rsPorts,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>                 Class&lt;? extends HMaster&gt; masterClass,<a name="line.232"></a>
-<span class="sourceLineNo">233</span>                 Class&lt;? extends MiniHBaseCluster.MiniHBaseClusterRegionServer&gt; regionserverClass)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  throws IOException, InterruptedException {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    try {<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      if (masterClass == null){<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        masterClass =  HMaster.class;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      if (regionserverClass == null){<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        regionserverClass = MiniHBaseCluster.MiniHBaseClusterRegionServer.class;<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      }<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // start up a LocalHBaseCluster<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      hbaseCluster = new LocalHBaseCluster(conf, nMasterNodes, 0,<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          masterClass, regionserverClass);<a name="line.245"></a>
-<span class="sourceLineNo">246</span><a name="line.246"></a>
-<span class="sourceLineNo">247</span>      // manually add the regionservers as other users<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      for (int i = 0; i &lt; nRegionNodes; i++) {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>        Configuration rsConf = HBaseConfiguration.create(conf);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>        if (rsPorts != null) {<a name="line.250"></a>
-<span class="sourceLineNo">251</span>          rsConf.setInt(HConstants.REGIONSERVER_PORT, rsPorts.get(i));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>        }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        User user = HBaseTestingUtility.getDifferentUser(rsConf,<a name="line.253"></a>
-<span class="sourceLineNo">254</span>            ".hfs."+index++);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>        hbaseCluster.addRegionServer(rsConf, i, user);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>      hbaseCluster.startup();<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    } catch (IOException e) {<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      shutdown();<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throw e;<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    } catch (Throwable t) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      LOG.error("Error starting cluster", t);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      shutdown();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      throw new IOException("Shutting down", t);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>  }<a name="line.267"></a>
-<span class="sourceLineNo">268</span><a name="line.268"></a>
-<span class="sourceLineNo">269</span>  @Override<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  public void startRegionServer(String hostname, int port) throws IOException {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.startRegionServer();<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  @Override<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  public void killRegionServer(ServerName serverName) throws IOException {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    HRegionServer server = getRegionServer(getRegionServerIndex(serverName));<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    if (server instanceof MiniHBaseClusterRegionServer) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      LOG.info("Killing " + server.toString());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      ((MiniHBaseClusterRegionServer) server).kill();<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    } else {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      abortRegionServer(getRegionServerIndex(serverName));<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  }<a name="line.283"></a>
-<span class="sourceLineNo">284</span><a name="line.284"></a>
-<span class="sourceLineNo">285</span>  @Override<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  public boolean isKilledRS(ServerName serverName) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    return MiniHBaseClusterRegionServer.killedServers.contains(serverName);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  }<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  @Override<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  public void stopRegionServer(ServerName serverName) throws IOException {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    stopRegionServer(getRegionServerIndex(serverName));<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>  @Override<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  public void waitForRegionServerToStop(ServerName serverName, long timeout) throws IOException {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    //ignore timeout for now<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    waitOnRegionServer(getRegionServerIndex(serverName));<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  }<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  @Override<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public void startZkNode(String hostname, int port) throws IOException {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    LOG.warn("Starting zookeeper nodes on mini cluster is not supported");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>  @Override<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  public void killZkNode(ServerName serverName) throws IOException {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    LOG.warn("Aborting zookeeper nodes on mini cluster is not supported");<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>  @Override<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  public void stopZkNode(ServerName serverName) throws IOException {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    LOG.warn("Stopping zookeeper nodes on mini cluster is not supported");<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>  @Override<a name="line.316"></a>
-<span class="sourceLineNo">317</span>  public void waitForZkNodeToStart(ServerName serverName, long timeout) throws IOException {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    LOG.warn("Waiting for zookeeper nodes to start on mini cluster is not supported");<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  @Override<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  public void waitForZkNodeToStop(ServerName serverName, long timeout) throws IOException {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    LOG.warn("Waiting for zookeeper nodes to stop on mini cluster is not supported");<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  }<a name="line.324"></a>
-<span class="sourceLineNo">325</span><a name="line.325"></a>
-<span class="sourceLineNo">326</span>  @Override<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public void startDataNode(ServerName serverName) throws IOException {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    LOG.warn("Starting datanodes on mini cluster is not supported");<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  @Override<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public void killDataNode(ServerName serverName) throws IOException {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    LOG.warn("Aborting datanodes on mini cluster is not supported");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  @Override<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  public void stopDataNode(ServerName serverName) throws IOException {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    LOG.warn("Stopping datanodes on mini cluster is not supported");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  @Override<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public void waitForDataNodeToStart(ServerName serverName, long timeout) throws IOException {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    LOG.warn("Waiting for datanodes to start on mini cluster is not supported");<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  @Override<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  public void waitForDataNodeToStop(ServerName serverName, long timeout) throws IOException {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    LOG.warn("Waiting for datanodes to stop on mini cluster is not supported");<a name="line.348"></a>
-<span class="sourceLineNo">349</span>  }<a name="line.349"></a>
-<span class="sourceLineNo">350</span><a name="line.350"></a>
-<span class="sourceLineNo">351</span>  @Override<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  public void startMaster(String hostname, int port) throws IOException {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    this.startMaster();<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  @Override<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public void killMaster(ServerName serverName) throws IOException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    abortMaster(getMasterIndex(serverName));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  @Override<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public void stopMaster(ServerName serverName) throws IOException {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    stopMaster(getMasterIndex(serverName));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  @Override<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public void waitForMasterToStop(ServerName serverName, long timeout) throws IOException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    //ignore timeout for now<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    waitOnMaster(getMasterIndex(serverName));<a name="line.369"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FileSystem;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.regionserver.HRegionServer;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.regionserver.Region;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.security.User;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.test.MetricsAssertHelper;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.JVMClusterUtil;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.slf4j.Logger;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.slf4j.LoggerFactory;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;<a name="line.48"></a>
+<span class="sourceLineNo">049</span><a name="line.49"></a>
+<span class="sourceLineNo">050</span>/**<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * This class creates a single process HBase cluster.<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * each server.  The master uses the 'default' FileSystem.  The RegionServers,<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * if we are running on DistributedFilesystem, create a FileSystem instance<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * each and will close down their instance on the way out.<a name="line.54"></a>
+<span class="sourceLineNo">055</span> */<a name="line.55"></a>
+<span class="sourceLineNo">056</span>@InterfaceAudience.Public<a name="line.56"></a>
+<span class="sourceLineNo">057</span>public class MiniHBaseCluster extends HBaseCluster {<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  private static final Logger LOG = LoggerFactory.getLogger(MiniHBaseCluster.class.getName());<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  public LocalHBaseCluster hbaseCluster;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  private static int index;<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>  /**<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * Start a MiniHBaseCluster.<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * @param conf Configuration to be used for cluster<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @param numRegionServers initial number of region servers to start.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * @throws IOException<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   */<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  public MiniHBaseCluster(Configuration conf, int numRegionServers)<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  throws IOException, InterruptedException {<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    this(conf, 1, numRegionServers);<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  }<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  /**<a name="line.73"></a>
+<span class="sourceLineNo">074</span>   * Start a MiniHBaseCluster.<a name="line.74"></a>
+<span class="sourceLineNo">075</span>   * @param conf Configuration to be used for cluster<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * @param numMasters initial number of masters to start.<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * @param numRegionServers initial number of region servers to start.<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   * @throws IOException<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   */<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers)<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      throws IOException, InterruptedException {<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    this(conf, numMasters, numRegionServers, null, null);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  }<a name="line.83"></a>
+<span class="sourceLineNo">084</span><a name="line.84"></a>
+<span class="sourceLineNo">085</span>  /**<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   * Start a MiniHBaseCluster.<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * @param conf Configuration to be used for cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * @param numMasters initial number of masters to start.<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * @param numRegionServers initial number of region servers to start.<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers,<a name="line.91"></a>
+<span class="sourceLineNo">092</span>         Class&lt;? extends HMaster&gt; masterClass,<a name="line.92"></a>
+<span class="sourceLineNo">093</span>         Class&lt;? extends MiniHBaseCluster.MiniHBaseClusterRegionServer&gt; regionserverClass)<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      throws IOException, InterruptedException {<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    this(conf, numMasters, numRegionServers, null, masterClass, regionserverClass);<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /**<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * @param rsPorts Ports that RegionServer should use; pass ports if you want to test cluster<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   *   restart where for sure the regionservers come up on same address+port (but<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   *   just with different startcode); by default mini hbase clusters choose new<a name="line.101"></a>
+<span class="sourceLineNo">102</span>   *   arbitrary ports on each cluster start.<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   * @throws IOException<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * @throws InterruptedException<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   */<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers,<a name="line.106"></a>
+<span class="sourceLineNo">107</span>         List&lt;Integer&gt; rsPorts,<a name="line.107"></a>
+<span class="sourceLineNo">108</span>         Class&lt;? extends HMaster&gt; masterClass,<a name="line.108"></a>
+<span class="sourceLineNo">109</span>         Class&lt;? extends MiniHBaseCluster.MiniHBaseClusterRegionServer&gt; regionserverClass)<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      throws IOException, InterruptedException {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    super(conf);<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>    // Hadoop 2<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    CompatibilityFactory.getInstance(MetricsAssertHelper.class).init();<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>    init(numMasters, numRegionServers, rsPorts, masterClass, regionserverClass);<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    this.initialClusterStatus = getClusterStatus();<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>  public Configuration getConfiguration() {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    return this.conf;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  }<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>  /**<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * Subclass so can get at protected methods (none at moment).  Also, creates<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * a FileSystem instance per instantiation.  Adds a shutdown own FileSystem<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * on the way out. Shuts down own Filesystem only, not All filesystems as<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   * the FileSystem system exit hook does.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static class MiniHBaseClusterRegionServer extends HRegionServer {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    private Thread shutdownThread = null;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    private User user = null;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    /**<a name="line.133"></a>
+<span class="sourceLineNo">134</span>     * List of RegionServers killed so far. ServerName also comprises startCode of a server,<a name="line.134"></a>
+<span class="sourceLineNo">135</span>     * so any restarted instances of the same server will have different ServerName and will not<a name="line.135"></a>
+<span class="sourceLineNo">136</span>     * coincide with past dead ones. So there's no need to cleanup this list.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>     */<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    static Set&lt;ServerName&gt; killedServers = new HashSet&lt;&gt;();<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>    public MiniHBaseClusterRegionServer(Configuration conf)<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        throws IOException, InterruptedException {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      super(conf);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      this.user = User.getCurrent();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    }<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>    /*<a name="line.146"></a>
+<span class="sourceLineNo">147</span>     * @param c<a name="line.147"></a>
+<span class="sourceLineNo">148</span>     * @param currentfs We return this if we did not make a new one.<a name="line.148"></a>
+<span class="sourceLineNo">149</span>     * @param uniqueName Same name used to help identify the created fs.<a name="line.149"></a>
+<span class="sourceLineNo">150</span>     * @return A new fs instance if we are up on DistributeFileSystem.<a name="line.150"></a>
+<span class="sourceLineNo">151</span>     * @throws IOException<a name="line.151"></a>
+<span class="sourceLineNo">152</span>     */<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>    @Override<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    protected void handleReportForDutyResponse(<a name="line.155"></a>
+<span class="sourceLineNo">156</span>        final RegionServerStartupResponse c) throws IOException {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      super.handleReportForDutyResponse(c);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      // Run this thread to shutdown our filesystem on way out.<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      this.shutdownThread = new SingleFileSystemShutdownThread(getFileSystem());<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>    @Override<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    public void run() {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      try {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        this.user.runAs(new PrivilegedAction&lt;Object&gt;() {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          @Override<a name="line.166"></a>
+<span class="sourceLineNo">167</span>          public Object run() {<a name="line.167"></a>
+<span class="sourceLineNo">168</span>            runRegionServer();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>            return null;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>          }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>        });<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      } catch (Throwable t) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        LOG.error("Exception in run", t);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      } finally {<a name="line.174"></a>
+<span class="sourceLineNo">175</span>        // Run this on the way out.<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        if (this.shutdownThread != null) {<a name="line.176"></a>
+<span class="sourceLineNo">177</span>          this.shutdownThread.start();<a name="line.177"></a>
+<span class="sourceLineNo">178</span>          Threads.shutdown(this.shutdownThread, 30000);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>        }<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    }<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    private void runRegionServer() {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      super.run();<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>    @Override<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    protected void kill() {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      killedServers.add(getServerName());<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      super.kill();<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    }<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>    @Override<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    public void abort(final String reason, final Throwable cause) {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      this.user.runAs(new PrivilegedAction&lt;Object&gt;() {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>        @Override<a name="line.196"></a>
+<span class="sourceLineNo">197</span>        public Object run() {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>          abortRegionServer(reason, cause);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>          return null;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>        }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      });<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>    private void abortRegionServer(String reason, Throwable cause) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      super.abort(reason, cause);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    }<a name="line.206"></a>
+<span class="sourceLineNo">207</span>  }<a name="line.207"></a>
+<span class="sourceLineNo">208</span><a name="line.208"></a>
+<span class="sourceLineNo">209</span>  /**<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * Alternate shutdown hook.<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   * Just shuts down the passed fs, not all as default filesystem hook does.<a name="line.211"></a>
+<span class="sourceLineNo">212</span>   */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>  static class SingleFileSystemShutdownThread extends Thread {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    private final FileSystem fs;<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    SingleFileSystemShutdownThread(final FileSystem fs) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      super("Shutdown of " + fs);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      this.fs = fs;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    }<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    @Override<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    public void run() {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      try {<a name="line.221"></a>
+<span class="sourceLineNo">222</span>        LOG.info("Hook closing fs=" + this.fs);<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        this.fs.close();<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      } catch (NullPointerException npe) {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        LOG.debug("Need to fix these: " + npe.toString());<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      } catch (IOException e) {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        LOG.warn("Running hook", e);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  }<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private void init(final int nMasterNodes, final int nRegionNodes, List&lt;Integer&gt; rsPorts,<a name="line.232"></a>
+<span class="sourceLineNo">233</span>                 Class&lt;? extends HMaster&gt; masterClass,<a name="line.233"></a>
+<span class="sourceLineNo">234</span>                 Class&lt;? extends MiniHBaseCluster.MiniHBaseClusterRegionServer&gt; regionserverClass)<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  throws IOException, InterruptedException {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    try {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      if (masterClass == null){<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        masterClass =  HMaster.class;<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      }<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      if (regionserverClass == null){<a name="line.240"></a>
+<span class="sourceLineNo">241</span>        regionserverClass = MiniHBaseCluster.MiniHBaseClusterRegionServer.class;<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      }<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>      // start up a LocalHBaseCluster<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      hbaseCluster = new LocalHBaseCluster(conf, nMasterNodes, 0,<a name="line.245"></a>
+<span class="sourceLineNo">246</span>          masterClass, regionserverClass);<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>      // manually add the regionservers as other users<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      for (int i = 0; i &lt; nRegionNodes; i++) {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>        Configuration rsConf = HBaseConfiguration.create(conf);<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        if (rsPorts != null) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>          rsConf.setInt(HConstants.REGIONSERVER_PORT, rsPorts.get(i));<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        }<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        User user = HBaseTestingUtility.getDifferentUser(rsConf,<a name="line.254"></a>
+<span class="sourceLineNo">255</span>            ".hfs."+index++);<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        hbaseCluster.addRegionServer(rsConf, i, user);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>      hbaseCluster.startup();<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    } catch (IOException e) {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      shutdown();<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      throw e;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (Throwable t) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.error("Error starting cluster", t);<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      shutdown();<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      throw new IOException("Shutting down", t);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    }<a name="line.267"></a>
+<span class="sourceLineNo">268</span>  }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>  @Override<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  public void startRegionServer(String hostname, int port) throws IOException {<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.startRegionServer();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  @Override<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  public void killRegionServer(ServerName serverName) throws IOException {<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    HRegionServer server = getRegionServer(getRegionServerIndex(serverName));<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    if (server instanceof MiniHBaseClusterRegionServer) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      LOG.info("Killing " + server.toString());<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      ((MiniHBaseClusterRegionServer) server).kill();<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    } else {<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      abortRegionServer(getRegionServerIndex(serverName));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  @Override<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  public boolean isKilledRS(ServerName serverName) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    return MiniHBaseClusterRegionServer.killedServers.contains(serverName);<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>  @Override<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  public void stopRegionServer(ServerName serverName) throws IOException {<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    stopRegionServer(getRegionServerIndex(serverName));<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  }<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  @Override<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  public void waitForRegionServerToStop(ServerName serverName, long timeout) throws IOException {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    //ignore timeout for now<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    waitOnRegionServer(getRegionServerIndex(serverName));<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
+<span class="sourceLineNo">301</span><a name="line.301"></a>
+<span class="sourceLineNo">302</span>  @Override<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  public void startZkNode(String hostname, int port) throws IOException {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    LOG.warn("Starting zookeeper nodes on mini cluster is not supported");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>  @Override<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  public void killZkNode(ServerName serverName) throws IOException {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LOG.warn("Aborting zookeeper nodes on mini cluster is not supported");<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  @Override<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public void stopZkNode(ServerName serverName) throws IOException {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    LOG.warn("Stopping zookeeper nodes on mini cluster is not supported");<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  }<a name="line.315"></a>
+<span class="sourceLineNo">316</span><a name="line.316"></a>
+<span class="sourceLineNo">317</span>  @Override<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  public void waitForZkNodeToStart(ServerName serverName, long timeout) throws IOException {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    LOG.warn("Waiting for zookeeper nodes to start on mini cluster is not supported");<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  @Override<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  public void waitForZkNodeToStop(ServerName serverName, long timeout) throws IOException {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    LOG.warn("Waiting for zookeeper nodes to stop on mini cluster is not supported");<a name="line.324"></a>
+<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
+<span class="sourceLineNo">326</span><a name="line.326"></a>
+<span class="sourceLineNo">327</span>  @Override<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  public void startDataNode(ServerName serverName) throws IOException {<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    LOG.warn("Starting datanodes on mini cluster is not supported");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
+<span class="sourceLineNo">331</span><a name="line.331"></a>
+<span class="sourceLineNo">332</span>  @Override<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public void killDataNode(ServerName serverName) throws IOException {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    LOG.warn("Aborting datanodes on mini cluster is not supported");<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>  @Override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  public void stopDataNode(ServerName serverName) throws IOException {<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    LOG.warn("Stopping datanodes on mini cluster is not supported");<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  }<a name="line.340"></a>
+<span class="sourceLineNo">341</span><a name="line.341"></a>
+<span class="sourceLineNo">342</span>  @Override<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public void waitForDataNodeToStart(ServerName serverName, long timeout) throws IOException {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    LOG.warn("Waiting for datanodes to start on mini cluster is not supported");<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @Override<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public void waitForDataNodeToStop(ServerName serverName, long timeout) throws IOException {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    LOG.warn("Waiting for datanodes to stop on mini cluster is not supported");<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public void startNameNode(ServerName serverName) throws IOException {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    LOG.warn("Starting namenodes on mini cluster is not supported");<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
+<span class="sourceLineNo">356</span><a name="line.356"></a>
+<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  public void killNameNode(ServerName serverName) throws IOException {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    LOG.warn("Aborting namenodes on mini cluster is not supported");<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  @Override<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  public void stopNameNode(ServerName serverName) throws IOException {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    LOG.warn("Stopping namenodes on mini cluster is not supported");<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>  @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  public void waitForNameNodeToStart(ServerName serverName, long timeout) throws IOException {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    LOG.warn("Waiting for namenodes to start on mini cluster is not supported");<a name="line.369"></a>
 <span class="sourceLineNo">370</span>  }<a name="line.370"></a>
 <span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>  /**<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * Starts a region server thread running<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * @throws IOException<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * @return New RegionServerThread<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public JVMClusterUtil.RegionServerThread startRegionServer()<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throws IOException {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    final Configuration newConf = HBaseConfiguration.create(conf);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    User rsUser =<a name="line.381"></a>
-<span class="sourceLineNo">382</span>        HBaseTestingUtility.getDifferentUser(newConf, ".hfs."+index++);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    JVMClusterUtil.RegionServerThread t =  null;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>    try {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      t = hbaseCluster.addRegionServer(<a name="line.385"></a>
-<span class="sourceLineNo">386</span>          newConf, hbaseCluster.getRegionServers().size(), rsUser);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      t.start();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      t.waitForServerOnline();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    } catch (InterruptedException ie) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      throw new IOException("Interrupted adding regionserver to cluster", ie);<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    return t;<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  /**<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * Starts a region server thread and waits until its processed by master. Throws an exception<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * when it can't start a region server or when the region server is not processed by master<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * within the timeout.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @return New RegionServerThread<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  public JVMClusterUtil.RegionServerThread startRegionServerAndWait(long timeout)<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      throws IOException {<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>    JVMClusterUtil.RegionServerThread t =  startRegionServer();<a name="line.405"></a>
-<span class="sourceLineNo">406</span>    ServerName rsServerName = t.getRegionServer().getServerName();<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    long start = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    ClusterStatus clusterStatus = getClusterStatus();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    while ((System.currentTimeMillis() - start) &lt; timeout) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      if (clusterStatus != null &amp;&amp; clusterStatus.getServers().contains(rsServerName)) {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        return t;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      }<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      Threads.sleep(100);<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    if (t.getRegionServer().isOnline()) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      throw new IOException("RS: " + rsServerName + " online, but not processed by master");<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    } else {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      throw new IOException("RS: " + rsServerName + " is offline");<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>  /**<a name="line.423"></a>
-<span class="sourceLineNo">424</span>   * Cause a region server to exit doing basic clean up only on its way out.<a name="line.424"></a>
-<span class="sourceLineNo">425</span>   * @param serverNumber  Used as index into a list.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>   */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  public String abortRegionServer(int serverNumber) {<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    HRegionServer server = getRegionServer(serverNumber);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    LOG.info("Aborting " + server.toString());<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    server.abort("Aborting for tests", new Exception("Trace info"));<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    return server.toString();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">372</span>  @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>  public void waitForNameNodeToStop(ServerName serverName, long timeout) throws IOException {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    LOG.warn("Waiting for namenodes to stop on mini cluster is not supported");<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
+<span class="sourceLineNo">376</span><a name="line.376"></a>
+<span class="sourceLineNo">377</span>  @Override<a name="line.377"></a>
+<span class="sourceLineNo">378</span>  public void startMaster(String hostname, int port) throws IOException {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.startMaster();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  public void killMaster(ServerName serverName) throws IOException {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    abortMaster(getMasterIndex(serverName));<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  }<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  @Override<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public void stopMaster(ServerName serverName) throws IOException {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    stopMaster(getMasterIndex(serverName));<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  }<a name="line.390"></a>
+<span class="sourceLineNo">391</span><a name="line.391"></a>
+<span class="sourceLineNo">392</span>  @Override<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  public void waitForMasterToStop(ServerName serverName, long timeout) throws IOException {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    //ignore timeout for now<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    waitOnMaster(getMasterIndex(serverName));<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Starts a region server thread running<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   *<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @throws IOException<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @return New RegionServerThread<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  public JVMClusterUtil.RegionServerThread startRegionServer()<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    final Configuration newConf = HBaseConfiguration.create(conf);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    User rsUser =<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        HBaseTestingUtility.getDifferentUser(newConf, ".hfs."+index++);<a name="line.408"></a>
+<span class="sourceLineNo">409</span>    JVMClusterUtil.RegionServerThread t =  null;<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    try {<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      t = hbaseCluster.addRegionServer(<a name="line.411"></a>
+<span class="sourceLineNo">412</span>          newConf, hbaseCluster.getRegionServers().size(), rsUser);<a name="line.412"></a>
+<span class="sourceLineNo">413</span>      t.start();<a name="line.413"></a>
+<span class="sourceLineNo">414</span>      t.waitForServerOnline();<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    } catch (InterruptedException ie) {<a name="line.415"></a>
+<span class="sourceLineNo">416</span>      throw new IOException("Interrupted adding regionserver to cluster", ie);<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    }<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    return t;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  }<a name="line.419"></a>
+<span class="sourceLineNo">420</span><a name="line.420"></a>
+<span class="sourceLineNo">421</span>  /**<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   * Starts a region server thread and waits until its processed by master. Throws an exception<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * when it can't start a region server or when the region server is not processed by master<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * within the timeout.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   *<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return New RegionServerThread<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  public JVMClusterUtil.RegionServerThread startRegionServerAndWait(long timeout)<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      throws IOException {<a name="line.429"></a>
+<span class="sourceLineNo">430</span><a name="line.430"></a>
+<span class="sourceLineNo">431</span>    JVMClusterUtil.RegionServerThread t =  startRegionServer();<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ServerName rsServerName = t.getRegionServer().getServerName();<a name="line.432"></a>
 <span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * Shut down the specified region server cleanly<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   *<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * @param serverNumber  Used as index into a list.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   * @return the region server that was stopped<a name="line.438"></a>
-<span class="sourceLineNo">439</span>   */<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public JVMClusterUtil.RegionServerThread stopRegionServer(int serverNumber) {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return stopRegionServer(serverNumber, true);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /**<a name="line.444"></a>
-<span class="sourceLineNo">445</span>   * Shut down the specified region server cleanly<a name="line.445"></a>
-<span class="sourceLineNo">446</span>   *<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   * @param serverNumber  Used as index into a list.<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * @param shutdownFS True is we are to shutdown the filesystem as part of this<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   * regionserver's shutdown.  Usually we do but you do not want to do this if<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * you are running multiple regionservers in a test and you shut down one<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * before end of the test.<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @return the region server that was stopped<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public JVMClusterUtil.RegionServerThread stopRegionServer(int serverNumber,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>      final boolean shutdownFS) {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    JVMClusterUtil.RegionServerThread server =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      hbaseCluster.getRegionServers().get(serverNumber);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    LOG.info("Stopping " + server.toString());<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    server.getRegionServer().stop("Stopping rs " + serverNumber);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>    return server;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>  }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>  /**<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * Wait for the specified region server to stop. Removes this thread from list<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * of running threads.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param serverNumber<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @return Name of region server that just went down.<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public String waitOnRegionServer(final int serverNumber) {<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    return this.hbaseCluster.waitOnRegionServer(serverNumber);<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * Starts a master thread running<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @return New RegionServerThread<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   */<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  public JVMClusterUtil.MasterThread startMaster() throws IOException {<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    Configuration c = HBaseConfiguration.create(conf);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    User user =<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        HBaseTestingUtility.getDifferentUser(c, ".hfs."+index++);<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    JVMClusterUtil.MasterThread t = null;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    try {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      t = hbaseCluster.addMaster(c, hbaseCluster.getMasters().size(), user);<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      t.start();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    } catch (InterruptedException ie) {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      throw new IOException("Interrupted adding master to cluster", ie);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    }<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    return t;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * Returns the current active master, if available.<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * @return the active HMaster, null if none is active.<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   */<a name="line.497"></a>
-<span class="sourceLineNo">498</span>  @Override<a name="line.498"></a>
-<span class="sourceLineNo">499</span>  public MasterService.BlockingInterface getMasterAdminService() {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    return this.hbaseCluster.getActiveMaster().getMasterRpcServices();<a name="line.500"></a>
-<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>  /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>   * Returns the current active master, if available.<a name="line.504"></a>
-<span class="sourceLineNo">505</span>   * @return the active HMaster, null if none is active.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>   */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>  public HMaster getMaster() {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    return this.hbaseCluster.getActiveMaster();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>  }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>  /**<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Returns the current active master thread, if available.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * @return the active MasterThread, null if none is active.<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   */<a name="line.514"></a>
-<span class="sourceLineNo">515</span>  public MasterThread getMasterThread() {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>    for (MasterThread mt: hbaseCluster.getLiveMasters()) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (mt.getMaster().isActiveMaster()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        return mt;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    return null;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  /**<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * Returns the master at the specified index, if available.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @return the active HMaster, null if none is active.<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   */<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  public HMaster getMaster(final int serverNumber) {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    return this.hbaseCluster.getMaster(serverNumber);<a name="line.529"></a>
-<span class="sourceLineNo">530</span>  }<a name="line.530"></a>
-<span class="sourceLineNo">531</span><a name="line.531"></a>
-<span class="sourceLineNo">532</span>  /**<a name="line.532"></a>
-<span class="sourceLineNo">533</span>   * Cause a master to exit without shutting down entire cluster.<a name="line.533"></a>
-<span class="sourceLineNo">534</span>   * @param serverNumber  Used as index into a list.<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   */<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  public String abortMaster(int serverNumber) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    HMaster server = getMaster(serverNumber);<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    LOG.info("Aborting " + server.toString());<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    server.abort("Aborting for tests", new Exception("Trace info"));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    return server.toString();<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  }<a name="line.541"></a>
-<span class="sourceLineNo">542</span><a name="line.542"></a>
-<span class="sourceLineNo">543</span>  /**<a name="line.543"></a>
-<span class="sourceLineNo">544</span>   * Shut down the specified master cleanly<a name="line.544"></a>
-<span class="sourceLineNo">545</span>   *<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * @param serverNumber  Used as index into a list.<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * @return the region server that was stopped<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   */<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  public JVMClusterUtil.MasterThread stopMaster(int serverNumber) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    return stopMaster(serverNumber, true);<a name="line.550"></a>
-<span class="sourceLineNo">551</sp

<TRUNCATED>

[11/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
index f2fd195..b293714 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -619,1696 +619,1698 @@
 <span class="sourceLineNo">611</span>    try {<a name="line.611"></a>
 <span class="sourceLineNo">612</span>      long procId =<a name="line.612"></a>
 <span class="sourceLineNo">613</span>          master.createTable(tableDescriptor, splitKeys, req.getNonceGroup(), req.getNonce());<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      return CreateTableResponse.newBuilder().setProcId(procId).build();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    } catch (IOException ioe) {<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      throw new ServiceException(ioe);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>  }<a name="line.618"></a>
-<span class="sourceLineNo">619</span><a name="line.619"></a>
-<span class="sourceLineNo">620</span>  @Override<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  public DeleteColumnResponse deleteColumn(RpcController controller,<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      DeleteColumnRequest req) throws ServiceException {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    try {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      long procId = master.deleteColumn(<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.625"></a>
-<span class="sourceLineNo">626</span>        req.getColumnName().toByteArray(),<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        req.getNonceGroup(),<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        req.getNonce());<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      if (procId == -1) {<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        return DeleteColumnResponse.newBuilder().build();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>        return DeleteColumnResponse.newBuilder().setProcId(procId).build();<a name="line.633"></a>
-<span class="sourceLineNo">634</span>      }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>    } catch (IOException ioe) {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      throw new ServiceException(ioe);<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    }<a name="line.637"></a>
-<span class="sourceLineNo">638</span>  }<a name="line.638"></a>
-<span class="sourceLineNo">639</span><a name="line.639"></a>
-<span class="sourceLineNo">640</span>  @Override<a name="line.640"></a>
-<span class="sourceLineNo">641</span>  public DeleteNamespaceResponse deleteNamespace(RpcController controller,<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      DeleteNamespaceRequest request) throws ServiceException {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>    try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      long procId = master.deleteNamespace(<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        request.getNamespaceName(),<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        request.getNonceGroup(),<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        request.getNonce());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return DeleteNamespaceResponse.newBuilder().setProcId(procId).build();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (IOException e) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      throw new ServiceException(e);<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
-<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
-<span class="sourceLineNo">653</span><a name="line.653"></a>
-<span class="sourceLineNo">654</span>  /**<a name="line.654"></a>
-<span class="sourceLineNo">655</span>   * Execute Delete Snapshot operation.<a name="line.655"></a>
-<span class="sourceLineNo">656</span>   * @return DeleteSnapshotResponse (a protobuf wrapped void) if the snapshot existed and was<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   *    deleted properly.<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * @throws ServiceException wrapping SnapshotDoesNotExistException if specified snapshot did not<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   *    exist.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>  public DeleteSnapshotResponse deleteSnapshot(RpcController controller,<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      DeleteSnapshotRequest request) throws ServiceException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    try {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>      master.checkInitialized();<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      master.snapshotManager.checkSnapshotSupport();<a name="line.666"></a>
-<span class="sourceLineNo">667</span><a name="line.667"></a>
-<span class="sourceLineNo">668</span>      LOG.info(master.getClientIdAuditPrefix() + " delete " + request.getSnapshot());<a name="line.668"></a>
-<span class="sourceLineNo">669</span>      master.snapshotManager.deleteSnapshot(request.getSnapshot());<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return DeleteSnapshotResponse.newBuilder().build();<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    } catch (IOException e) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      throw new ServiceException(e);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>  }<a name="line.674"></a>
-<span class="sourceLineNo">675</span><a name="line.675"></a>
-<span class="sourceLineNo">676</span>  @Override<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  public DeleteTableResponse deleteTable(RpcController controller,<a name="line.677"></a>
-<span class="sourceLineNo">678</span>      DeleteTableRequest request) throws ServiceException {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>    try {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      long procId = master.deleteTable(ProtobufUtil.toTableName(<a name="line.680"></a>
-<span class="sourceLineNo">681</span>          request.getTableName()), request.getNonceGroup(), request.getNonce());<a name="line.681"></a>
-<span class="sourceLineNo">682</span>      return DeleteTableResponse.newBuilder().setProcId(procId).build();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    } catch (IOException ioe) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      throw new ServiceException(ioe);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span>  }<a name="line.686"></a>
-<span class="sourceLineNo">687</span><a name="line.687"></a>
-<span class="sourceLineNo">688</span>  @Override<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  public TruncateTableResponse truncateTable(RpcController controller, TruncateTableRequest request)<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      throws ServiceException {<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    try {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      long procId = master.truncateTable(<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.693"></a>
-<span class="sourceLineNo">694</span>        request.getPreserveSplits(),<a name="line.694"></a>
-<span class="sourceLineNo">695</span>        request.getNonceGroup(),<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        request.getNonce());<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      return TruncateTableResponse.newBuilder().setProcId(procId).build();<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    } catch (IOException ioe) {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      throw new ServiceException(ioe);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    }<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>  public DisableTableResponse disableTable(RpcController controller,<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      DisableTableRequest request) throws ServiceException {<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    try {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>      long procId = master.disableTable(<a name="line.707"></a>
-<span class="sourceLineNo">708</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        request.getNonceGroup(),<a name="line.709"></a>
-<span class="sourceLineNo">710</span>        request.getNonce());<a name="line.710"></a>
-<span class="sourceLineNo">711</span>      return DisableTableResponse.newBuilder().setProcId(procId).build();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    } catch (IOException ioe) {<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      throw new ServiceException(ioe);<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    }<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>  @Override<a name="line.717"></a>
-<span class="sourceLineNo">718</span>  public EnableCatalogJanitorResponse enableCatalogJanitor(RpcController c,<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      EnableCatalogJanitorRequest req) throws ServiceException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    rpcPreCheck("enableCatalogJanitor");<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    return EnableCatalogJanitorResponse.newBuilder().setPrevValue(<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      master.catalogJanitorChore.setEnabled(req.getEnable())).build();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>  }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>  @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  public SetCleanerChoreRunningResponse setCleanerChoreRunning(<a name="line.726"></a>
-<span class="sourceLineNo">727</span>    RpcController c, SetCleanerChoreRunningRequest req) throws ServiceException {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>    rpcPreCheck("setCleanerChoreRunning");<a name="line.728"></a>
-<span class="sourceLineNo">729</span><a name="line.729"></a>
-<span class="sourceLineNo">730</span>    boolean prevValue =<a name="line.730"></a>
-<span class="sourceLineNo">731</span>      master.getLogCleaner().getEnabled() &amp;&amp; master.getHFileCleaner().getEnabled();<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    master.getLogCleaner().setEnabled(req.getOn());<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    master.getHFileCleaner().setEnabled(req.getOn());<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    return SetCleanerChoreRunningResponse.newBuilder().setPrevValue(prevValue).build();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>  }<a name="line.735"></a>
-<span class="sourceLineNo">736</span><a name="line.736"></a>
-<span class="sourceLineNo">737</span>  @Override<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  public EnableTableResponse enableTable(RpcController controller,<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      EnableTableRequest request) throws ServiceException {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    try {<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      long procId = master.enableTable(<a name="line.741"></a>
-<span class="sourceLineNo">742</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.742"></a>
-<span class="sourceLineNo">743</span>        request.getNonceGroup(),<a name="line.743"></a>
-<span class="sourceLineNo">744</span>        request.getNonce());<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return EnableTableResponse.newBuilder().setProcId(procId).build();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    } catch (IOException ioe) {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      throw new ServiceException(ioe);<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    }<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  @Override<a name="line.751"></a>
-<span class="sourceLineNo">752</span>  public MergeTableRegionsResponse mergeTableRegions(<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      RpcController c, MergeTableRegionsRequest request) throws ServiceException {<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    try {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      master.checkInitialized();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    } catch (IOException ioe) {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      throw new ServiceException(ioe);<a name="line.757"></a>
-<span class="sourceLineNo">758</span>    }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>    RegionStates regionStates = master.getAssignmentManager().getRegionStates();<a name="line.760"></a>
+<span class="sourceLineNo">614</span>      LOG.info(master.getClientIdAuditPrefix() + " procedure request for creating table: " +<a name="line.614"></a>
+<span class="sourceLineNo">615</span>              req.getTableSchema().getTableName() + " procId is: " + procId);<a name="line.615"></a>
+<span class="sourceLineNo">616</span>      return CreateTableResponse.newBuilder().setProcId(procId).build();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>    } catch (IOException ioe) {<a name="line.617"></a>
+<span class="sourceLineNo">618</span>      throw new ServiceException(ioe);<a name="line.618"></a>
+<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  }<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>  @Override<a name="line.622"></a>
+<span class="sourceLineNo">623</span>  public DeleteColumnResponse deleteColumn(RpcController controller,<a name="line.623"></a>
+<span class="sourceLineNo">624</span>      DeleteColumnRequest req) throws ServiceException {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    try {<a name="line.625"></a>
+<span class="sourceLineNo">626</span>      long procId = master.deleteColumn(<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        req.getColumnName().toByteArray(),<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        req.getNonceGroup(),<a name="line.629"></a>
+<span class="sourceLineNo">630</span>        req.getNonce());<a name="line.630"></a>
+<span class="sourceLineNo">631</span>      if (procId == -1) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        return DeleteColumnResponse.newBuilder().build();<a name="line.633"></a>
+<span class="sourceLineNo">634</span>      } else {<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        return DeleteColumnResponse.newBuilder().setProcId(procId).build();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>    } catch (IOException ioe) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      throw new ServiceException(ioe);<a name="line.638"></a>
+<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>  }<a name="line.640"></a>
+<span class="sourceLineNo">641</span><a name="line.641"></a>
+<span class="sourceLineNo">642</span>  @Override<a name="line.642"></a>
+<span class="sourceLineNo">643</span>  public DeleteNamespaceResponse deleteNamespace(RpcController controller,<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      DeleteNamespaceRequest request) throws ServiceException {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>    try {<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      long procId = master.deleteNamespace(<a name="line.646"></a>
+<span class="sourceLineNo">647</span>        request.getNamespaceName(),<a name="line.647"></a>
+<span class="sourceLineNo">648</span>        request.getNonceGroup(),<a name="line.648"></a>
+<span class="sourceLineNo">649</span>        request.getNonce());<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      return DeleteNamespaceResponse.newBuilder().setProcId(procId).build();<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    } catch (IOException e) {<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      throw new ServiceException(e);<a name="line.652"></a>
+<span class="sourceLineNo">653</span>    }<a name="line.653"></a>
+<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
+<span class="sourceLineNo">655</span><a name="line.655"></a>
+<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
+<span class="sourceLineNo">657</span>   * Execute Delete Snapshot operation.<a name="line.657"></a>
+<span class="sourceLineNo">658</span>   * @return DeleteSnapshotResponse (a protobuf wrapped void) if the snapshot existed and was<a name="line.658"></a>
+<span class="sourceLineNo">659</span>   *    deleted properly.<a name="line.659"></a>
+<span class="sourceLineNo">660</span>   * @throws ServiceException wrapping SnapshotDoesNotExistException if specified snapshot did not<a name="line.660"></a>
+<span class="sourceLineNo">661</span>   *    exist.<a name="line.661"></a>
+<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  @Override<a name="line.663"></a>
+<span class="sourceLineNo">664</span>  public DeleteSnapshotResponse deleteSnapshot(RpcController controller,<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      DeleteSnapshotRequest request) throws ServiceException {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    try {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      master.checkInitialized();<a name="line.667"></a>
+<span class="sourceLineNo">668</span>      master.snapshotManager.checkSnapshotSupport();<a name="line.668"></a>
+<span class="sourceLineNo">669</span><a name="line.669"></a>
+<span class="sourceLineNo">670</span>      LOG.info(master.getClientIdAuditPrefix() + " delete " + request.getSnapshot());<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      master.snapshotManager.deleteSnapshot(request.getSnapshot());<a name="line.671"></a>
+<span class="sourceLineNo">672</span>      return DeleteSnapshotResponse.newBuilder().build();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>    } catch (IOException e) {<a name="line.673"></a>
+<span class="sourceLineNo">674</span>      throw new ServiceException(e);<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    }<a name="line.675"></a>
+<span class="sourceLineNo">676</span>  }<a name="line.676"></a>
+<span class="sourceLineNo">677</span><a name="line.677"></a>
+<span class="sourceLineNo">678</span>  @Override<a name="line.678"></a>
+<span class="sourceLineNo">679</span>  public DeleteTableResponse deleteTable(RpcController controller,<a name="line.679"></a>
+<span class="sourceLineNo">680</span>      DeleteTableRequest request) throws ServiceException {<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    try {<a name="line.681"></a>
+<span class="sourceLineNo">682</span>      long procId = master.deleteTable(ProtobufUtil.toTableName(<a name="line.682"></a>
+<span class="sourceLineNo">683</span>          request.getTableName()), request.getNonceGroup(), request.getNonce());<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      return DeleteTableResponse.newBuilder().setProcId(procId).build();<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    } catch (IOException ioe) {<a name="line.685"></a>
+<span class="sourceLineNo">686</span>      throw new ServiceException(ioe);<a name="line.686"></a>
+<span class="sourceLineNo">687</span>    }<a name="line.687"></a>
+<span class="sourceLineNo">688</span>  }<a name="line.688"></a>
+<span class="sourceLineNo">689</span><a name="line.689"></a>
+<span class="sourceLineNo">690</span>  @Override<a name="line.690"></a>
+<span class="sourceLineNo">691</span>  public TruncateTableResponse truncateTable(RpcController controller, TruncateTableRequest request)<a name="line.691"></a>
+<span class="sourceLineNo">692</span>      throws ServiceException {<a name="line.692"></a>
+<span class="sourceLineNo">693</span>    try {<a name="line.693"></a>
+<span class="sourceLineNo">694</span>      long procId = master.truncateTable(<a name="line.694"></a>
+<span class="sourceLineNo">695</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.695"></a>
+<span class="sourceLineNo">696</span>        request.getPreserveSplits(),<a name="line.696"></a>
+<span class="sourceLineNo">697</span>        request.getNonceGroup(),<a name="line.697"></a>
+<span class="sourceLineNo">698</span>        request.getNonce());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>      return TruncateTableResponse.newBuilder().setProcId(procId).build();<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    } catch (IOException ioe) {<a name="line.700"></a>
+<span class="sourceLineNo">701</span>      throw new ServiceException(ioe);<a name="line.701"></a>
+<span class="sourceLineNo">702</span>    }<a name="line.702"></a>
+<span class="sourceLineNo">703</span>  }<a name="line.703"></a>
+<span class="sourceLineNo">704</span><a name="line.704"></a>
+<span class="sourceLineNo">705</span>  @Override<a name="line.705"></a>
+<span class="sourceLineNo">706</span>  public DisableTableResponse disableTable(RpcController controller,<a name="line.706"></a>
+<span class="sourceLineNo">707</span>      DisableTableRequest request) throws ServiceException {<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    try {<a name="line.708"></a>
+<span class="sourceLineNo">709</span>      long procId = master.disableTable(<a name="line.709"></a>
+<span class="sourceLineNo">710</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.710"></a>
+<span class="sourceLineNo">711</span>        request.getNonceGroup(),<a name="line.711"></a>
+<span class="sourceLineNo">712</span>        request.getNonce());<a name="line.712"></a>
+<span class="sourceLineNo">713</span>      return DisableTableResponse.newBuilder().setProcId(procId).build();<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    } catch (IOException ioe) {<a name="line.714"></a>
+<span class="sourceLineNo">715</span>      throw new ServiceException(ioe);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
+<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
+<span class="sourceLineNo">718</span><a name="line.718"></a>
+<span class="sourceLineNo">719</span>  @Override<a name="line.719"></a>
+<span class="sourceLineNo">720</span>  public EnableCatalogJanitorResponse enableCatalogJanitor(RpcController c,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      EnableCatalogJanitorRequest req) throws ServiceException {<a name="line.721"></a>
+<span class="sourceLineNo">722</span>    rpcPreCheck("enableCatalogJanitor");<a name="line.722"></a>
+<span class="sourceLineNo">723</span>    return EnableCatalogJanitorResponse.newBuilder().setPrevValue(<a name="line.723"></a>
+<span class="sourceLineNo">724</span>      master.catalogJanitorChore.setEnabled(req.getEnable())).build();<a name="line.724"></a>
+<span class="sourceLineNo">725</span>  }<a name="line.725"></a>
+<span class="sourceLineNo">726</span><a name="line.726"></a>
+<span class="sourceLineNo">727</span>  @Override<a name="line.727"></a>
+<span class="sourceLineNo">728</span>  public SetCleanerChoreRunningResponse setCleanerChoreRunning(<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    RpcController c, SetCleanerChoreRunningRequest req) throws ServiceException {<a name="line.729"></a>
+<span class="sourceLineNo">730</span>    rpcPreCheck("setCleanerChoreRunning");<a name="line.730"></a>
+<span class="sourceLineNo">731</span><a name="line.731"></a>
+<span class="sourceLineNo">732</span>    boolean prevValue =<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      master.getLogCleaner().getEnabled() &amp;&amp; master.getHFileCleaner().getEnabled();<a name="line.733"></a>
+<span class="sourceLineNo">734</span>    master.getLogCleaner().setEnabled(req.getOn());<a name="line.734"></a>
+<span class="sourceLineNo">735</span>    master.getHFileCleaner().setEnabled(req.getOn());<a name="line.735"></a>
+<span class="sourceLineNo">736</span>    return SetCleanerChoreRunningResponse.newBuilder().setPrevValue(prevValue).build();<a name="line.736"></a>
+<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
+<span class="sourceLineNo">738</span><a name="line.738"></a>
+<span class="sourceLineNo">739</span>  @Override<a name="line.739"></a>
+<span class="sourceLineNo">740</span>  public EnableTableResponse enableTable(RpcController controller,<a name="line.740"></a>
+<span class="sourceLineNo">741</span>      EnableTableRequest request) throws ServiceException {<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    try {<a name="line.742"></a>
+<span class="sourceLineNo">743</span>      long procId = master.enableTable(<a name="line.743"></a>
+<span class="sourceLineNo">744</span>        ProtobufUtil.toTableName(request.getTableName()),<a name="line.744"></a>
+<span class="sourceLineNo">745</span>        request.getNonceGroup(),<a name="line.745"></a>
+<span class="sourceLineNo">746</span>        request.getNonce());<a name="line.746"></a>
+<span class="sourceLineNo">747</span>      return EnableTableResponse.newBuilder().setProcId(procId).build();<a name="line.747"></a>
+<span class="sourceLineNo">748</span>    } catch (IOException ioe) {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>      throw new ServiceException(ioe);<a name="line.749"></a>
+<span class="sourceLineNo">750</span>    }<a name="line.750"></a>
+<span class="sourceLineNo">751</span>  }<a name="line.751"></a>
+<span class="sourceLineNo">752</span><a name="line.752"></a>
+<span class="sourceLineNo">753</span>  @Override<a name="line.753"></a>
+<span class="sourceLineNo">754</span>  public MergeTableRegionsResponse mergeTableRegions(<a name="line.754"></a>
+<span class="sourceLineNo">755</span>      RpcController c, MergeTableRegionsRequest request) throws ServiceException {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>    try {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>      master.checkInitialized();<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    } catch (IOException ioe) {<a name="line.758"></a>
+<span class="sourceLineNo">759</span>      throw new ServiceException(ioe);<a name="line.759"></a>
+<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
 <span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    assert(request.getRegionCount() == 2);<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    RegionInfo[] regionsToMerge = new RegionInfo[request.getRegionCount()];<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    for (int i = 0; i &lt; request.getRegionCount(); i++) {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      final byte[] encodedNameOfRegion = request.getRegion(i).getValue().toByteArray();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      if (request.getRegion(i).getType() != RegionSpecifierType.ENCODED_REGION_NAME) {<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        LOG.warn("MergeRegions specifier type: expected: "<a name="line.767"></a>
-<span class="sourceLineNo">768</span>          + RegionSpecifierType.ENCODED_REGION_NAME + " actual: region " + i + " ="<a name="line.768"></a>
-<span class="sourceLineNo">769</span>          + request.getRegion(i).getType());<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      }<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      RegionState regionState = regionStates.getRegionState(Bytes.toString(encodedNameOfRegion));<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (regionState == null) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        throw new ServiceException(<a name="line.773"></a>
-<span class="sourceLineNo">774</span>          new UnknownRegionException(Bytes.toStringBinary(encodedNameOfRegion)));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      regionsToMerge[i] = regionState.getRegion();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span><a name="line.778"></a>
-<span class="sourceLineNo">779</span>    try {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      long procId = master.mergeRegions(<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        regionsToMerge,<a name="line.781"></a>
-<span class="sourceLineNo">782</span>        request.getForcible(),<a name="line.782"></a>
-<span class="sourceLineNo">783</span>        request.getNonceGroup(),<a name="line.783"></a>
-<span class="sourceLineNo">784</span>        request.getNonce());<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return MergeTableRegionsResponse.newBuilder().setProcId(procId).build();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    } catch (IOException ioe) {<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      throw new ServiceException(ioe);<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span>  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span><a name="line.790"></a>
-<span class="sourceLineNo">791</span>  @Override<a name="line.791"></a>
-<span class="sourceLineNo">792</span>  public SplitTableRegionResponse splitRegion(final RpcController controller,<a name="line.792"></a>
-<span class="sourceLineNo">793</span>      final SplitTableRegionRequest request) throws ServiceException {<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    try {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      long procId = master.splitRegion(<a name="line.795"></a>
-<span class="sourceLineNo">796</span>        ProtobufUtil.toRegionInfo(request.getRegionInfo()),<a name="line.796"></a>
-<span class="sourceLineNo">797</span>        request.hasSplitRow() ? request.getSplitRow().toByteArray() : null,<a name="line.797"></a>
-<span class="sourceLineNo">798</span>        request.getNonceGroup(),<a name="line.798"></a>
-<span class="sourceLineNo">799</span>        request.getNonce());<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return SplitTableRegionResponse.newBuilder().setProcId(procId).build();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    } catch (IOException ie) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      throw new ServiceException(ie);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    }<a name="line.803"></a>
-<span class="sourceLineNo">804</span>  }<a name="line.804"></a>
-<span class="sourceLineNo">805</span><a name="line.805"></a>
-<span class="sourceLineNo">806</span>  @Override<a name="line.806"></a>
-<span class="sourceLineNo">807</span>  public ClientProtos.CoprocessorServiceResponse execMasterService(final RpcController controller,<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      final ClientProtos.CoprocessorServiceRequest request) throws ServiceException {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>    rpcPreCheck("execMasterService");<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    try {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      ServerRpcController execController = new ServerRpcController();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      ClientProtos.CoprocessorServiceCall call = request.getCall();<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      String serviceName = call.getServiceName();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      String methodName = call.getMethodName();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      if (!master.coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        throw new UnknownProtocolException(null,<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          "No registered Master Coprocessor Endpoint found for " + serviceName +<a name="line.817"></a>
-<span class="sourceLineNo">818</span>          ". Has it been enabled?");<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      }<a name="line.819"></a>
-<span class="sourceLineNo">820</span><a name="line.820"></a>
-<span class="sourceLineNo">821</span>      com.google.protobuf.Service service = master.coprocessorServiceHandlers.get(serviceName);<a name="line.821"></a>
-<span class="sourceLineNo">822</span>      com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      com.google.protobuf.Descriptors.MethodDescriptor methodDesc =<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          CoprocessorRpcUtils.getMethodDescriptor(methodName, serviceDesc);<a name="line.824"></a>
-<span class="sourceLineNo">825</span><a name="line.825"></a>
-<span class="sourceLineNo">826</span>      com.google.protobuf.Message execRequest =<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          CoprocessorRpcUtils.getRequest(service, methodDesc, call.getRequest());<a name="line.827"></a>
-<span class="sourceLineNo">828</span>      final com.google.protobuf.Message.Builder responseBuilder =<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          service.getResponsePrototype(methodDesc).newBuilderForType();<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      service.callMethod(methodDesc, execController, execRequest,<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        (message) -&gt; {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          if (message != null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>            responseBuilder.mergeFrom(message);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>          }<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        });<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      com.google.protobuf.Message execResult = responseBuilder.build();<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      if (execController.getFailedOn() != null) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>        throw execController.getFailedOn();<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      }<a name="line.839"></a>
-<span class="sourceLineNo">840</span>      return CoprocessorRpcUtils.getResponse(execResult, HConstants.EMPTY_BYTE_ARRAY);<a name="line.840"></a>
-<span class="sourceLineNo">841</span>    } catch (IOException ie) {<a name="line.841"></a>
-<span class="sourceLineNo">842</span>      throw new ServiceException(ie);<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    }<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  }<a name="line.844"></a>
-<span class="sourceLineNo">845</span><a name="line.845"></a>
-<span class="sourceLineNo">846</span>  /**<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   * Triggers an asynchronous attempt to run a distributed procedure.<a name="line.847"></a>
-<span class="sourceLineNo">848</span>   * {@inheritDoc}<a name="line.848"></a>
-<span class="sourceLineNo">849</span>   */<a name="line.849"></a>
-<span class="sourceLineNo">850</span>  @Override<a name="line.850"></a>
-<span class="sourceLineNo">851</span>  public ExecProcedureResponse execProcedure(RpcController controller,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      ExecProcedureRequest request) throws ServiceException {<a name="line.852"></a>
-<span class="sourceLineNo">853</span>    try {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      master.checkInitialized();<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      ProcedureDescription desc = request.getProcedure();<a name="line.855"></a>
-<span class="sourceLineNo">856</span>      MasterProcedureManager mpm = master.getMasterProcedureManagerHost().getProcedureManager(<a name="line.856"></a>
-<span class="sourceLineNo">857</span>        desc.getSignature());<a name="line.857"></a>
-<span class="sourceLineNo">858</span>      if (mpm == null) {<a name="line.858"></a>
-<span class="sourceLineNo">859</span>        throw new ServiceException(new DoNotRetryIOException("The procedure is not registered: "<a name="line.859"></a>
-<span class="sourceLineNo">860</span>          + desc.getSignature()));<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      }<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      LOG.info(master.getClientIdAuditPrefix() + " procedure request for: " + desc.getSignature());<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      mpm.checkPermissions(desc, accessChecker, RpcServer.getRequestUser().orElse(null));<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      mpm.execProcedure(desc);<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      // send back the max amount of time the client should wait for the procedure<a name="line.865"></a>
-<span class="sourceLineNo">866</span>      // to complete<a name="line.866"></a>
-<span class="sourceLineNo">867</span>      long waitTime = SnapshotDescriptionUtils.DEFAULT_MAX_WAIT_TIME;<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      return ExecProcedureResponse.newBuilder().setExpectedTimeout(<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        waitTime).build();<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    } catch (ForeignException e) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      throw new ServiceException(e.getCause());<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    } catch (IOException e) {<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      throw new ServiceException(e);<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>  }<a name="line.875"></a>
-<span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>  /**<a name="line.877"></a>
-<span class="sourceLineNo">878</span>   * Triggers a synchronous attempt to run a distributed procedure and sets<a name="line.878"></a>
-<span class="sourceLineNo">879</span>   * return data in response.<a name="line.879"></a>
-<span class="sourceLineNo">880</span>   * {@inheritDoc}<a name="line.880"></a>
-<span class="sourceLineNo">881</span>   */<a name="line.881"></a>
-<span class="sourceLineNo">882</span>  @Override<a name="line.882"></a>
-<span class="sourceLineNo">883</span>  public ExecProcedureResponse execProcedureWithRet(RpcController controller,<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      ExecProcedureRequest request) throws ServiceException {<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    rpcPreCheck("execProcedureWithRet");<a name="line.885"></a>
-<span class="sourceLineNo">886</span>    try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>      ProcedureDescription desc = request.getProcedure();<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      MasterProcedureManager mpm =<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        master.getMasterProcedureManagerHost().getProcedureManager(desc.getSignature());<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      if (mpm == null) {<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        throw new ServiceException("The procedure is not registered: " + desc.getSignature());<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      }<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      LOG.info(master.getClientIdAuditPrefix() + " procedure request for: " + desc.getSignature());<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      byte[] data = mpm.execProcedureWithRet(desc);<a name="line.894"></a>
-<span class="sourceLineNo">895</span>      ExecProcedureResponse.Builder builder = ExecProcedureResponse.newBuilder();<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      // set return data if available<a name="line.896"></a>
-<span class="sourceLineNo">897</span>      if (data != null) {<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        builder.setReturnData(UnsafeByteOperations.unsafeWrap(data));<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
-<span class="sourceLineNo">900</span>      return builder.build();<a name="line.900"></a>
-<span class="sourceLineNo">901</span>    } catch (IOException e) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>      throw new ServiceException(e);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    }<a name="line.903"></a>
-<span class="sourceLineNo">904</span>  }<a name="line.904"></a>
-<span class="sourceLineNo">905</span><a name="line.905"></a>
-<span class="sourceLineNo">906</span>  @Override<a name="line.906"></a>
-<span class="sourceLineNo">907</span>  public GetClusterStatusResponse getClusterStatus(RpcController controller,<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      GetClusterStatusRequest req) throws ServiceException {<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    GetClusterStatusResponse.Builder response = GetClusterStatusResponse.newBuilder();<a name="line.909"></a>
-<span class="sourceLineNo">910</span>    try {<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      master.checkInitialized();<a name="line.911"></a>
-<span class="sourceLineNo">912</span>      response.setClusterStatus(ClusterMetricsBuilder.toClusterStatus(<a name="line.912"></a>
-<span class="sourceLineNo">913</span>        master.getClusterMetrics(ClusterMetricsBuilder.toOptions(req.getOptionsList()))));<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    } catch (IOException e) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      throw new ServiceException(e);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>    }<a name="line.916"></a>
-<span class="sourceLineNo">917</span>    return response.build();<a name="line.917"></a>
-<span class="sourceLineNo">918</span>  }<a name="line.918"></a>
-<span class="sourceLineNo">919</span><a name="line.919"></a>
-<span class="sourceLineNo">920</span>  /**<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   * List the currently available/stored snapshots. Any in-progress snapshots are ignored<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   */<a name="line.922"></a>
-<span class="sourceLineNo">923</span>  @Override<a name="line.923"></a>
-<span class="sourceLineNo">924</span>  public GetCompletedSnapshotsResponse getCompletedSnapshots(RpcController controller,<a name="line.924"></a>
-<span class="sourceLineNo">925</span>      GetCompletedSnapshotsRequest request) throws ServiceException {<a name="line.925"></a>
-<span class="sourceLineNo">926</span>    try {<a name="line.926"></a>
-<span class="sourceLineNo">927</span>      master.checkInitialized();<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      GetCompletedSnapshotsResponse.Builder builder = GetCompletedSnapshotsResponse.newBuilder();<a name="line.928"></a>
-<span class="sourceLineNo">929</span>      List&lt;SnapshotDescription&gt; snapshots = master.snapshotManager.getCompletedSnapshots();<a name="line.929"></a>
-<span class="sourceLineNo">930</span><a name="line.930"></a>
-<span class="sourceLineNo">931</span>      // convert to protobuf<a name="line.931"></a>
-<span class="sourceLineNo">932</span>      for (SnapshotDescription snapshot : snapshots) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>        builder.addSnapshots(snapshot);<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      }<a name="line.934"></a>
-<span class="sourceLineNo">935</span>      return builder.build();<a name="line.935"></a>
-<span class="sourceLineNo">936</span>    } catch (IOException e) {<a name="line.936"></a>
-<span class="sourceLineNo">937</span>      throw new ServiceException(e);<a name="line.937"></a>
-<span class="sourceLineNo">938</span>    }<a name="line.938"></a>
-<span class="sourceLineNo">939</span>  }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>  @Override<a name="line.941"></a>
-<span class="sourceLineNo">942</span>  public GetNamespaceDescriptorResponse getNamespaceDescriptor(<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      RpcController controller, GetNamespaceDescriptorRequest request)<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      throws ServiceException {<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    try {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      return GetNamespaceDescriptorResponse.newBuilder()<a name="line.946"></a>
-<span class="sourceLineNo">947</span>        .setNamespaceDescriptor(ProtobufUtil.toProtoNamespaceDescriptor(<a name="line.947"></a>
-<span class="sourceLineNo">948</span>            master.getNamespace(request.getNamespaceName())))<a name="line.948"></a>
-<span class="sourceLineNo">949</span>        .build();<a name="line.949"></a>
-<span class="sourceLineNo">950</span>    } catch (IOException e) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>      throw new ServiceException(e);<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
-<span class="sourceLineNo">953</span>  }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>  /**<a name="line.955"></a>
-<span class="sourceLineNo">956</span>   * Get the number of regions of the table that have been updated by the alter.<a name="line.956"></a>
-<span class="sourceLineNo">957</span>   *<a name="line.957"></a>
-<span class="sourceLineNo">958</span>   * @return Pair indicating the number of regions updated Pair.getFirst is the<a name="line.958"></a>
-<span class="sourceLineNo">959</span>   *         regions that are yet to be updated Pair.getSecond is the total number<a name="line.959"></a>
-<span class="sourceLineNo">960</span>   *         of regions of the table<a name="line.960"></a>
-<span class="sourceLineNo">961</span>   * @throws ServiceException<a name="line.961"></a>
-<span class="sourceLineNo">962</span>   */<a name="line.962"></a>
-<span class="sourceLineNo">963</span>  @Override<a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public GetSchemaAlterStatusResponse getSchemaAlterStatus(<a name="line.964"></a>
-<span class="sourceLineNo">965</span>      RpcController controller, GetSchemaAlterStatusRequest req) throws ServiceException {<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    // TODO: currently, we query using the table name on the client side. this<a name="line.966"></a>
-<span class="sourceLineNo">967</span>    // may overlap with other table operations or the table operation may<a name="line.967"></a>
-<span class="sourceLineNo">968</span>    // have completed before querying this API. We need to refactor to a<a name="line.968"></a>
-<span class="sourceLineNo">969</span>    // transaction system in the future to avoid these ambiguities.<a name="line.969"></a>
-<span class="sourceLineNo">970</span>    TableName tableName = ProtobufUtil.toTableName(req.getTableName());<a name="line.970"></a>
-<span class="sourceLineNo">971</span><a name="line.971"></a>
-<span class="sourceLineNo">972</span>    try {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>      master.checkInitialized();<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      Pair&lt;Integer,Integer&gt; pair = master.getAssignmentManager().getReopenStatus(tableName);<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      GetSchemaAlterStatusResponse.Builder ret = GetSchemaAlterStatusResponse.newBuilder();<a name="line.975"></a>
-<span class="sourceLineNo">976</span>      ret.setYetToUpdateRegions(pair.getFirst());<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      ret.setTotalRegions(pair.getSecond());<a name="line.977"></a>
-<span class="sourceLineNo">978</span>      return ret.build();<a name="line.978"></a>
-<span class="sourceLineNo">979</span>    } catch (IOException ioe) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>      throw new ServiceException(ioe);<a name="line.980"></a>
-<span class="sourceLineNo">981</span>    }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>  }<a name="line.982"></a>
-<span class="sourceLineNo">983</span><a name="line.983"></a>
-<span class="sourceLineNo">984</span>  /**<a name="line.984"></a>
-<span class="sourceLineNo">985</span>   * Get list of TableDescriptors for requested tables.<a name="line.985"></a>
-<span class="sourceLineNo">986</span>   * @param c Unused (set to null).<a name="line.986"></a>
-<span class="sourceLineNo">987</span>   * @param req GetTableDescriptorsRequest that contains:<a name="line.987"></a>
-<span class="sourceLineNo">988</span>   * - tableNames: requested tables, or if empty, all are requested<a name="line.988"></a>
-<span class="sourceLineNo">989</span>   * @return GetTableDescriptorsResponse<a name="line.989"></a>
-<span class="sourceLineNo">990</span>   * @throws ServiceException<a name="line.990"></a>
-<span class="sourceLineNo">991</span>   */<a name="line.991"></a>
-<span class="sourceLineNo">992</span>  @Override<a name="line.992"></a>
-<span class="sourceLineNo">993</span>  public GetTableDescriptorsResponse getTableDescriptors(RpcController c,<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      GetTableDescriptorsRequest req) throws ServiceException {<a name="line.994"></a>
-<span class="sourceLineNo">995</span>    try {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>      master.checkInitialized();<a name="line.996"></a>
-<span class="sourceLineNo">997</span><a name="line.997"></a>
-<span class="sourceLineNo">998</span>      final String regex = req.hasRegex() ? req.getRegex() : null;<a name="line.998"></a>
-<span class="sourceLineNo">999</span>      final String namespace = req.hasNamespace() ? req.getNamespace() : null;<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>      List&lt;TableName&gt; tableNameList = null;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>      if (req.getTableNamesCount() &gt; 0) {<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>        tableNameList = new ArrayList&lt;TableName&gt;(req.getTableNamesCount());<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>        for (HBaseProtos.TableName tableNamePB: req.getTableNamesList()) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>          tableNameList.add(ProtobufUtil.toTableName(tableNamePB));<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>        }<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>      }<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span><a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>      List&lt;TableDescriptor&gt; descriptors = master.listTableDescriptors(namespace, regex,<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>          tableNameList, req.getIncludeSysTables());<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span><a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>      GetTableDescriptorsResponse.Builder builder = GetTableDescriptorsResponse.newBuilder();<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>      if (descriptors != null &amp;&amp; descriptors.size() &gt; 0) {<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>        // Add the table descriptors to the response<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>        for (TableDescriptor htd: descriptors) {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>          builder.addTableSchema(ProtobufUtil.toTableSchema(htd));<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>        }<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>      }<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>      return builder.build();<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>    } catch (IOException ioe) {<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>      throw new ServiceException(ioe);<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>    }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>  }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span><a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>  /**<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>   * Get list of userspace table names<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>   * @param controller Unused (set to null).<a name="line.1026"></a>
-<span class="sourceLineNo">1027</span>   * @param req GetTableNamesRequest<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>   * @return GetTableNamesResponse<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>   * @throws ServiceException<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>   */<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>  @Override<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>  public GetTableNamesResponse getTableNames(RpcController controller,<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      GetTableNamesRequest req) throws ServiceException {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>    try {<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>      master.checkServiceStarted();<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span><a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>      final String regex = req.hasRegex() ? req.getRegex() : null;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>      final String namespace = req.hasNamespace() ? req.getNamespace() : null;<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>      List&lt;TableName&gt; tableNames = master.listTableNames(namespace, regex,<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          req.getIncludeSysTables());<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span><a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      GetTableNamesResponse.Builder builder = GetTableNamesResponse.newBuilder();<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>      if (tableNames != null &amp;&amp; tableNames.size() &gt; 0) {<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>        // Add the table names to the response<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        for (TableName table: tableNames) {<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>          builder.addTableNames(ProtobufUtil.toProtoTableName(table));<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        }<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      return builder.build();<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>    } catch (IOException e) {<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>      throw new ServiceException(e);<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>    }<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span>  }<a name="line.1053"></a>
-<span class="sourceLineNo">1054</span><a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>  @Override<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>  public GetTableStateResponse getTableState(RpcController controller,<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>      GetTableStateRequest request) throws ServiceException {<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>    try {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>      master.checkServiceStarted();<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>      TableName tableName = ProtobufUtil.toTableName(request.getTableName());<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>      TableState ts = master.getTableStateManager().getTableState(tableName);<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      GetTableStateResponse.Builder builder = GetTableStateResponse.newBuilder();<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>      builder.setTableState(ts.convert());<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>      return builder.build();<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>    } catch (IOException e) {<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      throw new ServiceException(e);<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>    }<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>  }<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span><a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>  @Override<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>  public IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled(RpcController c,<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span>      IsCatalogJanitorEnabledRequest req) throws ServiceException {<a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    return IsCatalogJanitorEnabledResponse.newBuilder().setValue(<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>      master.isCatalogJanitorEnabled()).build();<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>  }<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span><a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>  @Override<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>  public IsCleanerChoreEnabledResponse isCleanerChoreEnabled(RpcController c,<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>                                                             IsCleanerChoreEnabledRequest req)<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    throws ServiceException {<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span>    return IsCleanerChoreEnabledResponse.newBuilder().setValue(master.isCleanerChoreEnabled())<a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>                                        .build();<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>  }<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span><a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  @Override<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>  public IsMasterRunningResponse isMasterRunning(RpcController c,<a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>      IsMasterRunningRequest req) throws ServiceException {<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>    try {<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>      master.checkServiceStarted();<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>      return IsMasterRunningResponse.newBuilder().setIsMasterRunning(<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>        !master.isStopped()).build();<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>    } catch (IOException e) {<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>      throw new ServiceException(e);<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>    }<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  }<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span><a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>  /**<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>   * Checks if the specified procedure is done.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span>   * @return true if the procedure is done, false if the procedure is in the process of completing<a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>   * @throws ServiceException if invalid procedure or failed procedure with progress failure reason.<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>   */<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>  @Override<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>  public IsProcedureDoneResponse isProcedureDone(RpcController controller,<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span>      IsProcedureDoneRequest request) throws ServiceException {<a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    try {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      master.checkInitialized();<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      ProcedureDescription desc = request.getProcedure();<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>      MasterProcedureManager mpm = master.getMasterProcedureManagerHost().getProcedureManager(<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span>        desc.getSignature());<a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>      if (mpm == null) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>        throw new ServiceException("The procedure is not registered: "<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>          + desc.getSignature());<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>      }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span>      LOG.debug("Checking to see if procedure from request:"<a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>        + desc.getSignature() + " is done");<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span><a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      IsProcedureDoneResponse.Builder builder =<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>        IsProcedureDoneResponse.newBuilder();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      boolean done = mpm.isProcedureDone(desc);<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      builder.setDone(done);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>      return builder.build();<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span>    } catch (ForeignException e) {<a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>      throw new ServiceException(e.getCause());<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    } catch (IOException e) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      throw new ServiceException(e);<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>    }<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>  }<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span><a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>  /**<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>   * Checks if the specified snapshot is done.<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span>   * @return true if the snapshot is in file system ready to use,<a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>   *   false if the snapshot is in the process of completing<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>   * @throws ServiceException wrapping UnknownSnapshotException if invalid snapshot, or<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span>   *  a wrapped HBaseSnapshotException with progress failure reason.<a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>   */<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>  @Override<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>  public IsSnapshotDoneResponse isSnapshotDone(RpcController controller,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>      IsSnapshotDoneRequest request) throws ServiceException {<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>    LOG.debug("Checking to see if snapshot from request:" +<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>      ClientSnapshotDescriptionUtils.toString(request.getSnapshot()) + " is done");<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>    try {<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>      master.checkInitialized();<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>      IsSnapshotDoneResponse.Builder builder = IsSnapshotDoneResponse.newBuilder();<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>      boolean done = master.snapshotManager.isSnapshotDone(request.getSnapshot());<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>      builder.setDone(done);<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>      return builder.build();<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    } catch (ForeignException e) {<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>      throw new ServiceException(e.getCause());<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    } catch (IOException e) {<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      throw new ServiceException(e);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    }<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>  }<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span><a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>  @Override<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>  public GetProcedureResultResponse getProcedureResult(RpcController controller,<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span>      GetProcedureResultRequest request) throws ServiceException {<a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>    LOG.debug("Checking to see if procedure is done pid=" + request.getProcId());<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>    try {<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>      master.checkInitialized();<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      GetProcedureResultResponse.Builder builder = GetProcedureResultResponse.newBuilder();<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span>      long procId = request.getProcId();<a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      ProcedureExecutor&lt;?&gt; executor = master.getMasterProcedureExecutor();<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      Procedure&lt;?&gt; result = executor.getResultOrProcedure(procId);<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      if (result != null) {<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span>        builder.setSubmittedTime(result.getSubmittedTime());<a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>        builder.setLastUpdate(result.getLastUpdate());<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>        if (executor.isFinished(procId)) {<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>          builder.setState(GetProcedureResultResponse.State.FINISHED);<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>          if (result.isFailed()) {<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>            IOException exception = result.getException().unwrapRemoteIOException();<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>            builder.setException(ForeignExceptionUtil.toProtoForeignException(exception));<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>          }<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>          byte[] resultData = result.getResult();<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>          if (resultData != null) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>            builder.setResult(UnsafeByteOperations.unsafeWrap(resultData));<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>          }<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>          master.getMasterProcedureExecutor().removeResult(request.getProcId());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        } else {<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          builder.setState(GetProcedureResultResponse.State.RUNNING);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        }<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span>      } else {<a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        builder.setState(GetProcedureResultResponse.State.NOT_FOUND);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>      }<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span>      return builder.build();<a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>    } catch (IOException e) {<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      throw new ServiceException(e);<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>    }<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>  }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span><a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  @Override<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span>  public AbortProcedureResponse abortProcedure(<a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>      RpcController rpcController, AbortProcedureRequest request) throws ServiceException {<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>    try {<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>      AbortProcedureResponse.Builder response = AbortProcedureResponse.newBuilder();<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>      boolean abortResult =<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>          master.abortProcedure(request.getProcId(), request.getMayInterruptIfRunning());<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>      response.setIsProcedureAborted(abortResult);<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>      return response.build();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    } catch (IOException e) {<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>      throw new ServiceException(e);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    }<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>  }<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span><a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>  @Override<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span>  public ListNamespaceDescriptorsResponse listNamespaceDescriptors(RpcController c,<a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>      ListNamespaceDescriptorsRequest request) throws ServiceException {<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    try {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      ListNamespaceDescriptorsResponse.Builder response =<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>        ListNamespaceDescriptorsResponse.newBuilder();<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      for(NamespaceDescriptor ns: master.getNamespaces()) {<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span>        response.addNamespaceDescriptor(ProtobufUtil.toProtoNamespaceDescriptor(ns));<a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      }<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      return response.build();<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>    } catch (IOException e) {<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span>      throw new ServiceException(e);<a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>    }<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>  }<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>  @Override<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>  public GetProceduresResponse getProcedures(<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      RpcController rpcController,<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      GetProceduresRequest request) throws ServiceException {<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span>    try {<a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      final GetProceduresResponse.Builder response = GetProceduresResponse.newBuilder();<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>      for (Procedure&lt;?&gt; p: master.getProcedures()) {<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>        response.addProcedure(ProcedureUtil.convertToProtoProcedure(p));<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span>      }<a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      return response.build();<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>    } catch (IOException e) {<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      throw new ServiceException(e);<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>    }<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>  }<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span><a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>  @Override<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>  public GetLocksResponse getLocks(<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      RpcController controller,<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span>      GetLocksRequest request) throws ServiceException {<a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>    try {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>      final GetLocksResponse.Builder builder = GetLocksResponse.newBuilder();<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span><a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>      for (LockedResource lockedResource: master.getLocks()) {<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>        builder.addLock(ProcedureUtil.convertToProtoLockedResource(lockedResource));<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span>      }<a name="line.1243"></a>
-<span class="sourceLineNo">1244</span><a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>      return builder.build();<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    } catch (IOException e) {<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>      throw new ServiceException(e);<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    }<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>  }<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span><a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>  @Override<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>  public ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace(RpcController c,<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      ListTableDescriptorsByNamespaceRequest request) throws ServiceException {<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    try {<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>      ListTableDescriptorsByNamespaceResponse.Builder b =<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>          ListTableDescriptorsByNamespaceResponse.newBuilder();<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>      for (TableDescriptor htd : master<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span>          .listTableDescriptorsByNamespace(request.getNamespaceName())) {<a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>        b.addTableSchema(ProtobufUtil.toTableSchema(htd));<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>      }<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>      return b.build();<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>    } catch (IOException e) {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>      throw new ServiceException(e);<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>    }<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>  }<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span><a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>  @Override<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>  public ListTableNamesByNamespaceResponse listTableNamesByNamespace(RpcController c,<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>      ListTableNamesByNamespaceRequest request) throws ServiceException {<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>    try {<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span>      ListTableNamesByNamespaceResponse.Builder b =<a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>        ListTableNamesByNamespaceResponse.newBuilder();<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>      for (TableName tableName: master.listTableNamesByNamespace(request.getNamespaceName())) {<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>        b.addTableName(ProtobufUtil.toProtoTableName(tableName));<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>      }<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>      return b.build();<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    } catch (IOException e) {<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>      throw new ServiceException(e);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    }<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>  }<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span><a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>  @Override<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>  public ModifyColumnResponse modifyColumn(RpcController controller,<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>      ModifyColumnRequest req) throws ServiceException {<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>    try {<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>      long procId = master.modifyColumn(<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>        ProtobufUtil.toColumnFamilyDescriptor(req.getColumnFamilies()),<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>        req.getNonceGroup(),<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>        req.getNonce());<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span>      if (procId == -1) {<a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>        return ModifyColumnResponse.newBuilder().build();<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>      } else {<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span>        return ModifyColumnResponse.newBuilder().setProcId(procId).build();<a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>      }<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>    } catch (IOException ioe) {<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>      throw new ServiceException(ioe);<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>    }<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  }<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span><a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>  @Override<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>  public ModifyNamespaceResponse modifyNamespace(RpcController controller,<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>      ModifyNamespaceRequest request) throws ServiceException {<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>    try {<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      long procId = master.modifyNamespace(<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>        ProtobufUtil.toNamespaceDescriptor(request.getNamespaceDescriptor()),<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>        request.getNonceGroup(),<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span>        request.getNonce());<a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>      return ModifyNamespaceResponse.newBuilder().setProcId(procId).build();<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>    } catch (IOException e) {<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      throw new ServiceException(e);<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span>  }<a name="line.1314"></a>
-<span class="sourceLineNo">1315</span><a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>  @Override<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>  public ModifyTableResponse modifyTable(RpcController controller,<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>      ModifyTableRequest req) throws ServiceException {<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>    try {<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span>      long procId = master.modifyTable(<a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>        ProtobufUtil.toTableName(req.getTableName()),<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>        ProtobufUtil.toTableDescriptor(req.getTableSchema()),<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>        req.getNonceGroup(),<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>        req.getNonce());<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>      return ModifyTableResponse.newBuilder().setProcId(procId).build();<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    } catch (IOException ioe) {<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>      throw new ServiceException(ioe);<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    }<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>  }<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span><a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>  @Override<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  public MoveRegionResponse moveRegion(RpcController controller,<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span>      MoveRegionRequest req) throws ServiceException {<a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>    final byte [] encodedRegionName = req.getRegion().getValue().toByteArray();<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>    RegionSpecifierType type = req.getRegion().getType();<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>    final byte [] destServerName = (req.hasDestServerName())?<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>      Bytes.toBytes(ProtobufUtil.toServerName(req.getDestServerName()).getServerName()):null;<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>    MoveRegionResponse mrr = MoveRegionResponse.newBuilder().build();<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span><a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    if (type != RegionSpecifierType.ENCODED_REGION_NAME) {<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>      LOG.warn("moveRegion specifier type: expected: " + RegionSpecifierType.ENCODED_REGION_NAME<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span>        + " actual: " + type);<a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    }<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span><a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    try {<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span>      master.checkInitialized();<a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>      master.move(encodedRegionName, destServerName);<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>    } catch (IOException ioe) {<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      throw new ServiceException(ioe);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span>    return mrr;<a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>  }<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span><a name="line.1353"></a>
-<span class="sourceLineNo">1354</span>  /**<a name="line.

<TRUNCATED>

[07/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/plugins.html
----------------------------------------------------------------------
diff --git a/plugins.html b/plugins.html
index 133655d..d889203 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugins</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -375,7 +375,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/poweredbyhbase.html
----------------------------------------------------------------------
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index a8bae52..7a002ac 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Powered By Apache HBase™</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -769,7 +769,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/project-info.html
----------------------------------------------------------------------
diff --git a/project-info.html b/project-info.html
index 95c4bbe..c3c3dc8 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -335,7 +335,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/project-reports.html
----------------------------------------------------------------------
diff --git a/project-reports.html b/project-reports.html
index ed5b94c..c891241 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Generated Reports</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -305,7 +305,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/project-summary.html
----------------------------------------------------------------------
diff --git a/project-summary.html b/project-summary.html
index d2d95c0..fd407ec 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Summary</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -331,7 +331,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/pseudo-distributed.html
----------------------------------------------------------------------
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index 8263a90..66f093c 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
 Running Apache HBase (TM) in pseudo-distributed mode
@@ -308,7 +308,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/replication.html
----------------------------------------------------------------------
diff --git a/replication.html b/replication.html
index cca75ea..c69b6039 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Apache HBase (TM) Replication
@@ -303,7 +303,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/resources.html
----------------------------------------------------------------------
diff --git a/resources.html b/resources.html
index 2efa30f..75ca753 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Other Apache HBase (TM) Resources</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -331,7 +331,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/source-repository.html
----------------------------------------------------------------------
diff --git a/source-repository.html b/source-repository.html
index 52eb611..4f679d2 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Source Code Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -299,7 +299,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/sponsors.html
----------------------------------------------------------------------
diff --git a/sponsors.html b/sponsors.html
index 9e2a89b..681219a 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase™ Sponsors</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -333,7 +333,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/supportingprojects.html
----------------------------------------------------------------------
diff --git a/supportingprojects.html b/supportingprojects.html
index 996b1fe..a4764fd 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Supporting Projects</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -520,7 +520,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/team-list.html
----------------------------------------------------------------------
diff --git a/team-list.html b/team-list.html
index 77a3b89..d91dfd8 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180801" />
+    <meta name="Date-Revision-yyyymmdd" content="20180802" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Team</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -742,7 +742,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-08-01</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-08-02</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/testapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/testapidocs/index-all.html b/testapidocs/index-all.html
index d650cac..e81b877 100644
--- a/testapidocs/index-all.html
+++ b/testapidocs/index-all.html
@@ -819,6 +819,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#killMaster-org.apache.hadoop.hbase.ServerName-">killMaster(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#killNameNode-org.apache.hadoop.hbase.ServerName-">killNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#killRegionServer-org.apache.hadoop.hbase.ServerName-">killRegionServer(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#killZkNode-org.apache.hadoop.hbase.ServerName-">killZkNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
@@ -1253,6 +1255,8 @@
 <dd>
 <div class="block">Call this if you only want a zk cluster.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#startNameNode-org.apache.hadoop.hbase.ServerName-">startNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServer-java.lang.String-int-">startRegionServer(String, int)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#startRegionServer--">startRegionServer()</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
@@ -1277,6 +1281,8 @@
 <dd>
 <div class="block">Shut down the specified master cleanly</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#stopNameNode-org.apache.hadoop.hbase.ServerName-">stopNameNode(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-org.apache.hadoop.hbase.ServerName-">stopRegionServer(ServerName)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#stopRegionServer-int-">stopRegionServer(int)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
@@ -1374,6 +1380,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForMasterToStop-org.apache.hadoop.hbase.ServerName-long-">waitForMasterToStop(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStart(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForNameNodeToStop-org.apache.hadoop.hbase.ServerName-long-">waitForNameNodeToStop(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForRegionServerToStop-org.apache.hadoop.hbase.ServerName-long-">waitForRegionServerToStop(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MiniHBaseCluster.html#waitForZkNodeToStart-org.apache.hadoop.hbase.ServerName-long-">waitForZkNodeToStart(ServerName, long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MiniHBaseCluster.html" title="class in org.apache.hadoop.hbase">MiniHBaseCluster</a></dt>


[22/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</span>  /**<a name="line.503"><

<TRUNCATED>

[39/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html
deleted file mode 100644
index f576e0c..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html
+++ /dev/null
@@ -1,193 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Class org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Class org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html" target="_top">Frames</a></li>
-<li><a href="UniqueIndexMap.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Class org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap" class="title">Uses of Class<br>org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.hfile.bucket">org.apache.hadoop.hbase.io.hfile.bucket</a></td>
-<td class="colLast">
-<div class="block">Provides <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketCache</code></a>, an implementation of
- <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>BlockCache</code></a>.</div>
-</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.hfile.bucket">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a> in <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> declared as <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#deserialiserMap">deserialiserMap</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> with parameters of type <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>protected <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>protected void</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
-                        <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a></code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.RAMQueueEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#writeToCache-org.apache.hadoop.hbase.io.hfile.bucket.IOEngine-org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-java.util.concurrent.atomic.LongAdder-">writeToCache</a></span>(<a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
-            <a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
-            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true" title="class or interface in java.util.concurrent.atomic">LongAdder</a>&nbsp;realCacheSize)</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html" target="_top">Frames</a></li>
-<li><a href="UniqueIndexMap.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-frame.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-frame.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-frame.html
index 877954c..6518b6c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-frame.html
@@ -27,13 +27,13 @@
 <li><a href="BucketCache.SharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketCache.SharedMemoryBucketEntry</a></li>
 <li><a href="BucketCache.StatisticsThread.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketCache.StatisticsThread</a></li>
 <li><a href="BucketCacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketCacheStats</a></li>
+<li><a href="BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">BucketProtoUtils</a></li>
 <li><a href="ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">ByteBufferIOEngine</a></li>
 <li><a href="CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">CachedEntryQueue</a></li>
 <li><a href="FileIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">FileIOEngine</a></li>
 <li><a href="FileIOEngine.FileReadAccessor.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">FileIOEngine.FileReadAccessor</a></li>
 <li><a href="FileIOEngine.FileWriteAccessor.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">FileIOEngine.FileWriteAccessor</a></li>
 <li><a href="FileMmapEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">FileMmapEngine</a></li>
-<li><a href="UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">UniqueIndexMap</a></li>
 <li><a href="UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket" target="classFrame">UnsafeSharedMemoryBucketEntry</a></li>
 </ul>
 <h2 title="Exceptions">Exceptions</h2>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
index f846ca7..b4cb626 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
@@ -159,46 +159,44 @@
 </td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketProtoUtils</a></td>
+<td class="colLast">&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">ByteBufferIOEngine</a></td>
 <td class="colLast">
 <div class="block">IO engine that stores data in memory using an array of ByteBuffers
  <a href="../../../../../../../org/apache/hadoop/hbase/util/ByteBufferArray.html" title="class in org.apache.hadoop.hbase.util"><code>ByteBufferArray</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CachedEntryQueue</a></td>
 <td class="colLast">
 <div class="block">A memory-bound queue that will grow until an element brings total size larger
  than maxSize.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">FileIOEngine</a></td>
 <td class="colLast">
 <div class="block">IO engine that stores data to a file on the local file system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.FileReadAccessor.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">FileIOEngine.FileReadAccessor</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.FileWriteAccessor.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">FileIOEngine.FileWriteAccessor</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">FileMmapEngine</a></td>
 <td class="colLast">
 <div class="block">IO engine that stores data to a file on the local file system using memory mapping
  mechanism</div>
 </td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;T&gt;</td>
-<td class="colLast">
-<div class="block">Map from type T to int and vice-versa.</div>
-</td>
-</tr>
 <tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UnsafeSharedMemoryBucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UnsafeSharedMemoryBucketEntry</a></td>
 <td class="colLast">&nbsp;</td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-tree.html
index 98ae8fb..71319a8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-tree.html
@@ -94,6 +94,7 @@
 </li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketCache.BucketEntryGroup</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketCache.RAMQueueEntry</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">BucketProtoUtils</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">ByteBufferIOEngine</span></a> (implements org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">CachedEntryQueue</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheStats</span></a>
@@ -129,7 +130,6 @@
 </li>
 </ul>
 </li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.bucket.<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="typeNameLink">UniqueIndexMap</span></a>&lt;T&gt; (implements java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)</li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
index 5c1ee38..4568069 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
@@ -199,11 +199,6 @@
  <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketCache</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
-<td class="colOne"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/class-use/UniqueIndexMap.html#org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>
-<div class="block">Map from type T to int and vice-versa.</div>
-</td>
-</tr>
 </tbody>
 </table>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
index 13046bb..4a8b8a8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
@@ -423,10 +423,15 @@
 </tr>
 <tbody>
 <tr class="altColor">
+<td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPB-java.util.Map-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap-">fromPB</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;deserializers,
+      org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap&nbsp;backingMap)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">CachedEntryQueue.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html#poll--">poll</a></span>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">CachedEntryQueue.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html#pollLast--">pollLast</a></span>()</code>&nbsp;</td>
 </tr>
@@ -518,6 +523,10 @@
 <td class="colLast"><span class="typeNameLabel">BucketCache.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#returnBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-org.apache.hadoop.hbase.io.hfile.Cacheable-">returnBlock</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
            <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;block)</code>&nbsp;</td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockCacheKey</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key)</code>&nbsp;</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
@@ -537,6 +546,10 @@
 <div class="block">Attempt to add the specified entry to this queue.</div>
 </td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BackingMap</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-java.util.Map-">toPB</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;backingMap)</code>&nbsp;</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockPriority.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockPriority.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockPriority.html
index 2af5ec3..0f2830d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockPriority.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockPriority.html
@@ -179,6 +179,19 @@ the order they are declared.</div>
 </tr>
 </tbody>
 </table>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
+<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> with parameters of type <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockPriority</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockPriority-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;p)</code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
index ba12adf..e12ff9d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
@@ -94,6 +94,13 @@
 </td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.hfile.bucket">org.apache.hadoop.hbase.io.hfile.bucket</a></td>
+<td class="colLast">
+<div class="block">Provides <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketCache</code></a>, an implementation of
+ <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>BlockCache</code></a>.</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><a href="#org.apache.hadoop.hbase.regionserver">org.apache.hadoop.hbase.regionserver</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
@@ -440,6 +447,37 @@ the order they are declared.</div>
 </tbody>
 </table>
 </li>
+<li class="blockList"><a name="org.apache.hadoop.hbase.io.hfile.bucket">
+<!--   -->
+</a>
+<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a></h3>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
+<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a></code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#fromPb-org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType-">fromPb</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType&nbsp;blockType)</code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
+<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a> with parameters of type <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos.BlockType</code></td>
+<td class="colLast"><span class="typeNameLabel">BucketProtoUtils.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.html#toPB-org.apache.hadoop.hbase.io.hfile.BlockType-">toPB</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+</li>
 <li class="blockList"><a name="org.apache.hadoop.hbase.regionserver">
 <!--   -->
 </a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.MemoryType.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.MemoryType.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.MemoryType.html
index ceeb7cc..a119ac9 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.MemoryType.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.MemoryType.html
@@ -154,6 +154,12 @@ the order they are declared.</div>
            boolean&nbsp;reuse,
            <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)</code>&nbsp;</td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HFileBlock.BlockDeserializer.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
+           boolean&nbsp;reuse,
+           <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)</code>&nbsp;</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
index 4a3c74b..25228f7 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
@@ -156,7 +156,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>(package private) static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileBlock.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#BLOCK_DESERIALIZER">BLOCK_DESERIALIZER</a></span></code>
 <div class="block">Used deserializing blocks from Cache.</div>
 </td>
@@ -239,7 +239,7 @@
 <tr class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">CacheableDeserializerIdManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#getDeserializer-int-">getDeserializer</a></span>(int&nbsp;id)</code>
-<div class="block">Get the cacheable deserializer as the given identifier Id</div>
+<div class="block">Get the cacheable deserializer registered at the given identifier Id.</div>
 </td>
 </tr>
 </tbody>
@@ -358,8 +358,9 @@
 <tr class="altColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><span class="typeNameLabel">CacheableDeserializerIdManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registerDeserializer-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">registerDeserializer</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;cd)</code>
-<div class="block">Register the given cacheable deserializer and generate an unique identifier
- id for it</div>
+<div class="block">Register the given <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>Cacheable</code></a> -- usually an hfileblock instance, these implement
+ the Cacheable Interface -- deserializer and generate an unique identifier id for it and return
+ this as our result.</div>
 </td>
 </tr>
 </tbody>
@@ -460,7 +461,7 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference--">deserializerReference</a></span>()</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>
@@ -546,8 +547,7 @@
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>protected void</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
-                        <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">setDeserialiserReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer)</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
index b78e5ea..864919a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
@@ -105,6 +105,19 @@
 <!--   -->
 </a>
 <h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></h3>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
+<caption><span>Classes in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a> that implement <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Class and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></span></code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
 <caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
@@ -113,7 +126,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>(package private) static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileBlock.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#BLOCK_DESERIALIZER">BLOCK_DESERIALIZER</a></span></code>
 <div class="block">Used deserializing blocks from Cache.</div>
 </td>
@@ -153,7 +166,7 @@
 <tr class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">CacheableDeserializerIdManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#getDeserializer-int-">getDeserializer</a></span>(int&nbsp;id)</code>
-<div class="block">Get the cacheable deserializer as the given identifier Id</div>
+<div class="block">Get the cacheable deserializer registered at the given identifier Id.</div>
 </td>
 </tr>
 </tbody>
@@ -168,8 +181,9 @@
 <tr class="altColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><span class="typeNameLabel">CacheableDeserializerIdManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html#registerDeserializer-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">registerDeserializer</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;cd)</code>
-<div class="block">Register the given cacheable deserializer and generate an unique identifier
- id for it</div>
+<div class="block">Register the given <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>Cacheable</code></a> -- usually an hfileblock instance, these implement
+ the Cacheable Interface -- deserializer and generate an unique identifier id for it and return
+ this as our result.</div>
 </td>
 </tr>
 </tbody>
@@ -188,7 +202,7 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">deserializerReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#deserializerReference--">deserializerReference</a></span>()</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>
@@ -229,8 +243,7 @@
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>protected void</code></td>
-<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap-">setDeserialiserReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
-                        <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">BucketCache.BucketEntry.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#setDeserialiserReference-org.apache.hadoop.hbase.io.hfile.CacheableDeserializer-">setDeserialiserReference</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer)</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockDeserializer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockDeserializer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockDeserializer.html
new file mode 100644
index 0000000..ae621dd
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockDeserializer.html
@@ -0,0 +1,125 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc -->
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Uses of Class org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockDeserializer (Apache HBase 3.0.0-SNAPSHOT API)</title>
+<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
+<script type="text/javascript" src="../../../../../../../script.js"></script>
+</head>
+<body>
+<script type="text/javascript"><!--
+    try {
+        if (location.href.indexOf('is-external=true') == -1) {
+            parent.document.title="Uses of Class org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockDeserializer (Apache HBase 3.0.0-SNAPSHOT API)";
+        }
+    }
+    catch(err) {
+    }
+//-->
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar.top">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.top.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="../package-summary.html">Package</a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">Class</a></li>
+<li class="navBarCell1Rev">Use</li>
+<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
+<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockDeserializer.html" target="_top">Frames</a></li>
+<li><a href="HFileBlock.BlockDeserializer.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip.navbar.top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<div class="header">
+<h2 title="Uses of Class org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockDeserializer" class="title">Uses of Class<br>org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockDeserializer</h2>
+</div>
+<div class="classUseContainer">No usage of org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockDeserializer</div>
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar.bottom">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.bottom.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
+<li><a href="../package-summary.html">Package</a></li>
+<li><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">Class</a></li>
+<li class="navBarCell1Rev">Use</li>
+<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
+<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockDeserializer.html" target="_top">Frames</a></li>
+<li><a href="HFileBlock.BlockDeserializer.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip.navbar.bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
index 06a94a2..b98a321 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
@@ -151,6 +151,16 @@
 <td class="colLast"><span class="typeNameLabel">HFileBlock.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#deepClone--">deepClone</a></span>()</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HFileBlock.BlockDeserializer.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-">deserialize</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HFileBlock.BlockDeserializer.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html#deserialize-org.apache.hadoop.hbase.nio.ByteBuff-boolean-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType-">deserialize</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf,
+           boolean&nbsp;reuse,
+           <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HFileBlock.Writer.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#getBlockForCaching-org.apache.hadoop.hbase.io.hfile.CacheConfig-">getBlockForCaching</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</code>
 <div class="block">Creates a new HFileBlock.</div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-frame.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-frame.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-frame.html
index 23e64fa..36fefd4 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-frame.html
@@ -52,6 +52,7 @@
 <li><a href="HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFile.FileInfo</a></li>
 <li><a href="HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFile.WriterFactory</a></li>
 <li><a href="HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock</a></li>
+<li><a href="HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock.BlockDeserializer</a></li>
 <li><a href="HFileBlock.FSReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock.FSReaderImpl</a></li>
 <li><a href="HFileBlock.Header.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock.Header</a></li>
 <li><a href="HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile" target="classFrame">HFileBlock.PrefetchedHeader</a></li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
index 4402cee..c7e06bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
@@ -226,8 +226,7 @@
 <tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheableDeserializerIdManager</a></td>
 <td class="colLast">
-<div class="block">This class is used to manage the identifiers for
- <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializer</code></a></div>
+<div class="block">This class is used to manage the identifiers for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>CacheableDeserializer</code></a>.</div>
 </td>
 </tr>
 <tr class="altColor">
@@ -320,28 +319,32 @@
 </td>
 </tr>
 <tr class="rowColor">
+<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockDeserializer</a></td>
+<td class="colLast">&nbsp;</td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReaderImpl</a></td>
 <td class="colLast">
 <div class="block">Reads version 2 HFile blocks from the filesystem.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Header</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a></td>
 <td class="colLast">
 <div class="block">Data-structure to use caching the header of the NEXT block.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></td>
 <td class="colLast">
 <div class="block">Unified version 2 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> block writer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex</a></td>
 <td class="colLast">
 <div class="block">Provides functionality to write (<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlockIndex.BlockIndexWriter</code></a>) and read
@@ -349,32 +352,32 @@
  single-level and multi-level block indexes.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexChunk.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexChunk</a></td>
 <td class="colLast">
 <div class="block">A single chunk of the block index in the process of writing.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a></td>
 <td class="colLast">
 <div class="block">The reader will always hold the root level index in the memory.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a></td>
 <td class="colLast">
 <div class="block">Writes the block index into the output stream.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.ByteArrayKeyBlockIndexReader</a></td>
 <td class="colLast">
 <div class="block">An implementation of the BlockIndexReader that deals with block keys which are plain
  byte[] like MetaBlock or the Bloom Block for ROW bloom.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.CellBasedKeyBlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.CellBasedKeyBlockIndexReader</a></td>
 <td class="colLast">
 <div class="block">An implementation of the BlockIndexReader that deals with block keys which are the key
@@ -382,79 +385,79 @@
  This needs a comparator to work with the Cells</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></td>
 <td class="colLast">
 <div class="block">This carries the information on some of the meta data about the HFile.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContextBuilder</a></td>
 <td class="colLast">
 <div class="block">A builder that helps in building up the HFileContext</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoderImpl</a></td>
 <td class="colLast">
 <div class="block">Do different kinds of data block encoding according to column family
  options.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFilePrettyPrinter</a></td>
 <td class="colLast">
 <div class="block">Implements pretty-printing functionality for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>s.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html" title="class in org.apache.hadoop.hbase.io.hfile">HFilePrettyPrinter.KeyValueStatsCollector</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFilePrettyPrinter.SimpleReporter</a></td>
 <td class="colLast">
 <div class="block">Almost identical to ConsoleReporter, but extending ScheduledReporter,
  as extending ConsoleReporter in this version of dropwizard is now too much trouble.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.Builder.html" title="class in org.apache.hadoop.hbase.io.hfile">HFilePrettyPrinter.SimpleReporter.Builder</a></td>
 <td class="colLast">
 <div class="block">A builder for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFilePrettyPrinter.SimpleReporter</code></a> instances.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileReaderImpl</a></td>
 <td class="colLast">
 <div class="block">Implementation that can handle all hfile versions of <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>HFile.Reader</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileReaderImpl.EncodedScanner</a></td>
 <td class="colLast">
 <div class="block">Scanner that operates on encoded data blocks.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileReaderImpl.HFileScannerImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileUtil.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileUtil</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></td>
 <td class="colLast">
 <div class="block">Common functionality needed by all versions of <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> writers.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">InclusiveCombinedBlockCache</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a></td>
 <td class="colLast">
 <div class="block">A block cache implementation that is memory-aware using <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io"><code>HeapSize</code></a>,
@@ -463,50 +466,50 @@
  constant-time <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html#cacheBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-org.apache.hadoop.hbase.io.hfile.Cacheable-boolean-"><code>LruBlockCache.cacheBlock(org.apache.hadoop.hbase.io.hfile.BlockCacheKey, org.apache.hadoop.hbase.io.hfile.Cacheable, boolean)</code></a> and <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html#getBlock-org.apache.hadoop.hbase.io.hfile.BlockCacheKey-boolean-boolean-boolean-"><code>LruBlockCache.getBlock(org.apache.hadoop.hbase.io.hfile.BlockCacheKey, boolean, boolean, boolean)</code></a> operations.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.EvictionThread.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache.EvictionThread</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.StatisticsThread.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache.StatisticsThread</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruCachedBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">LruCachedBlock</a></td>
 <td class="colLast">
 <div class="block">Represents an entry in the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>LruBlockCache</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.html" title="class in org.apache.hadoop.hbase.io.hfile">LruCachedBlockQueue</a></td>
 <td class="colLast">
 <div class="block">A memory-bound queue that will grow until an element brings
  total size &gt;= maxSize.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">MemcachedBlockCache</a></td>
 <td class="colLast">
 <div class="block">Class to store blocks into memcached.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html" title="class in org.apache.hadoop.hbase.io.hfile">MemcachedBlockCache.HFileBlockTranscoder</a></td>
 <td class="colLast">
 <div class="block">Class to encode and decode an HFileBlock to and from memecached's resulting byte arrays.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.html" title="class in org.apache.hadoop.hbase.io.hfile">NoOpDataBlockEncoder</a></td>
 <td class="colLast">
 <div class="block">Does not perform any kind of encoding/decoding.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.NoneEncodingState.html" title="class in org.apache.hadoop.hbase.io.hfile">NoOpDataBlockEncoder.NoneEncodingState</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.html" title="class in org.apache.hadoop.hbase.io.hfile">PrefetchExecutor</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index ed1fd87..b844aff 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -129,6 +129,7 @@
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFile.FileInfo</span></a> (implements java.util.<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;K,V&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFile.WriterFactory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock</span></a> (implements org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>)</li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.BlockDeserializer</span></a> (implements org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.FSReaderImpl</span></a> (implements org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Header.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Header</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.PrefetchedHeader</span></a></li>
@@ -273,11 +274,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
index 8c8a497..109c123 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
@@ -512,19 +512,24 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/BlockPriority.html#org.apache.hadoop.hbase.io.hfile.bucket">BlockPriority</a>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html#org.apache.hadoop.hbase.io.hfile.bucket">BlockType</a>
+<div class="block">Various types of HFile blocks.</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html#org.apache.hadoop.hbase.io.hfile.bucket">Cacheable</a>
 <div class="block">Cacheable is an interface that allows for an object to be cached.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html#org.apache.hadoop.hbase.io.hfile.bucket">CacheableDeserializer</a>
 <div class="block">Interface for a deserializer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html#org.apache.hadoop.hbase.io.hfile.bucket">CachedBlock</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html#org.apache.hadoop.hbase.io.hfile.bucket">CacheStats</a>
 <div class="block">Class that implements cache metrics.</div>
 </td>


[32/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
index ef680de..f919922 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
@@ -46,120 +46,120 @@
 <span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.backup.util.BackupUtils;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.client.Admin;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.slf4j.Logger;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.slf4j.LoggerFactory;<a name="line.45"></a>
-<span class="sourceLineNo">046</span><a name="line.46"></a>
-<span class="sourceLineNo">047</span>/**<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * After a full backup was created, the incremental backup will only store the changes made after<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * the last full or incremental backup. Creating the backup copies the logfiles in .logs and<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * .oldlogs since the last backup timestamp.<a name="line.50"></a>
-<span class="sourceLineNo">051</span> */<a name="line.51"></a>
-<span class="sourceLineNo">052</span>@InterfaceAudience.Private<a name="line.52"></a>
-<span class="sourceLineNo">053</span>public class IncrementalBackupManager extends BackupManager {<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  public static final Logger LOG = LoggerFactory.getLogger(IncrementalBackupManager.class);<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public IncrementalBackupManager(Connection conn, Configuration conf) throws IOException {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    super(conn, conf);<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  /**<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * Obtain the list of logs that need to be copied out for this incremental backup. The list is set<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * in BackupInfo.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * @return The new HashMap of RS log time stamps after the log roll for this incremental backup.<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @throws IOException exception<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public HashMap&lt;String, Long&gt; getIncrBackupLogFileMap() throws IOException {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    List&lt;String&gt; logList;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>    String savedStartCode = readBackupStartCode();<a name="line.71"></a>
-<span class="sourceLineNo">072</span><a name="line.72"></a>
-<span class="sourceLineNo">073</span>    // key: tableName<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.77"></a>
-<span class="sourceLineNo">078</span><a name="line.78"></a>
-<span class="sourceLineNo">079</span>    if (LOG.isDebugEnabled()) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.83"></a>
-<span class="sourceLineNo">084</span>        || previousTimestampMins.isEmpty()) {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      throw new IOException(<a name="line.85"></a>
-<span class="sourceLineNo">086</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.86"></a>
-<span class="sourceLineNo">087</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    }<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    LOG.info("Execute roll log procedure for incremental backup ...");<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    HashMap&lt;String, String&gt; props = new HashMap&lt;&gt;();<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    props.put("backupRoot", backupInfo.getBackupRootDir());<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>    try (Admin admin = conn.getAdmin()) {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.101"></a>
-<span class="sourceLineNo">102</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.102"></a>
-<span class="sourceLineNo">103</span>            .getBackupRootDir());<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>    return newTimestamps;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /**<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Get list of WAL files eligible for incremental backup.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   *<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   * @return list of WAL files<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @throws IOException if getting the list of WAL files fails<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public List&lt;String&gt; getIncrBackupLogFileList() throws IOException {<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    List&lt;String&gt; logList;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    String savedStartCode = readBackupStartCode();<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // key: tableName<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>    if (LOG.isDebugEnabled()) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        || previousTimestampMins.isEmpty()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      throw new IOException(<a name="line.135"></a>
-<span class="sourceLineNo">136</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.136"></a>
-<span class="sourceLineNo">137</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            .getBackupRootDir());<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    return logList;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.slf4j.Logger;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.LoggerFactory;<a name="line.46"></a>
+<span class="sourceLineNo">047</span><a name="line.47"></a>
+<span class="sourceLineNo">048</span>/**<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * After a full backup was created, the incremental backup will only store the changes made after<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * the last full or incremental backup. Creating the backup copies the logfiles in .logs and<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * .oldlogs since the last backup timestamp.<a name="line.51"></a>
+<span class="sourceLineNo">052</span> */<a name="line.52"></a>
+<span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
+<span class="sourceLineNo">054</span>public class IncrementalBackupManager extends BackupManager {<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  public static final Logger LOG = LoggerFactory.getLogger(IncrementalBackupManager.class);<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  public IncrementalBackupManager(Connection conn, Configuration conf) throws IOException {<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    super(conn, conf);<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  }<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  /**<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   * Obtain the list of logs that need to be copied out for this incremental backup. The list is set<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * in BackupInfo.<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * @return The new HashMap of RS log time stamps after the log roll for this incremental backup.<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @throws IOException exception<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  public HashMap&lt;String, Long&gt; getIncrBackupLogFileMap() throws IOException {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    List&lt;String&gt; logList;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>    String savedStartCode = readBackupStartCode();<a name="line.72"></a>
+<span class="sourceLineNo">073</span><a name="line.73"></a>
+<span class="sourceLineNo">074</span>    // key: tableName<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>    if (LOG.isDebugEnabled()) {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    }<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.84"></a>
+<span class="sourceLineNo">085</span>        || previousTimestampMins.isEmpty()) {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      throw new IOException(<a name="line.86"></a>
+<span class="sourceLineNo">087</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.87"></a>
+<span class="sourceLineNo">088</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>    LOG.info("Execute roll log procedure for incremental backup ...");<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    HashMap&lt;String, String&gt; props = new HashMap&lt;&gt;();<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    props.put("backupRoot", backupInfo.getBackupRootDir());<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>    try (Admin admin = conn.getAdmin()) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.102"></a>
+<span class="sourceLineNo">103</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.103"></a>
+<span class="sourceLineNo">104</span>            .getBackupRootDir());<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    logList = excludeAlreadyBackedUpAndProcV2WALs(logList, logFromSystemTable);<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>    return newTimestamps;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Get list of WAL files eligible for incremental backup.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   *<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @return list of WAL files<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * @throws IOException if getting the list of WAL files fails<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public List&lt;String&gt; getIncrBackupLogFileList() throws IOException {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    List&lt;String&gt; logList;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>    String savedStartCode = readBackupStartCode();<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // key: tableName<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>    if (LOG.isDebugEnabled()) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.134"></a>
+<span class="sourceLineNo">135</span>        || previousTimestampMins.isEmpty()) {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      throw new IOException(<a name="line.136"></a>
+<span class="sourceLineNo">137</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.137"></a>
+<span class="sourceLineNo">138</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.145"></a>
+<span class="sourceLineNo">146</span>            .getBackupRootDir());<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>    logList = excludeAlreadyBackedUpAndProcV2WALs(logList, logFromSystemTable);<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    return logList;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  private List&lt;String&gt; excludeAlreadyBackedUpWALs(List&lt;String&gt; logList,<a name="line.154"></a>
+<span class="sourceLineNo">154</span>  private List&lt;String&gt; excludeAlreadyBackedUpAndProcV2WALs(List&lt;String&gt; logList,<a name="line.154"></a>
 <span class="sourceLineNo">155</span>      List&lt;WALItem&gt; logFromSystemTable) {<a name="line.155"></a>
 <span class="sourceLineNo">156</span>    Set&lt;String&gt; walFileNameSet = convertToSet(logFromSystemTable);<a name="line.156"></a>
 <span class="sourceLineNo">157</span><a name="line.157"></a>
@@ -168,7 +168,7 @@
 <span class="sourceLineNo">160</span>      Path p = new Path(logList.get(i));<a name="line.160"></a>
 <span class="sourceLineNo">161</span>      String name  = p.getName();<a name="line.161"></a>
 <span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>      if (walFileNameSet.contains(name)) {<a name="line.163"></a>
+<span class="sourceLineNo">163</span>      if (walFileNameSet.contains(name) || name.startsWith(WALProcedureStore.LOG_PREFIX)) {<a name="line.163"></a>
 <span class="sourceLineNo">164</span>        continue;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>      }<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index ef680de..f919922 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
@@ -46,120 +46,120 @@
 <span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.backup.util.BackupUtils;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.client.Admin;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.slf4j.Logger;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.slf4j.LoggerFactory;<a name="line.45"></a>
-<span class="sourceLineNo">046</span><a name="line.46"></a>
-<span class="sourceLineNo">047</span>/**<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * After a full backup was created, the incremental backup will only store the changes made after<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * the last full or incremental backup. Creating the backup copies the logfiles in .logs and<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * .oldlogs since the last backup timestamp.<a name="line.50"></a>
-<span class="sourceLineNo">051</span> */<a name="line.51"></a>
-<span class="sourceLineNo">052</span>@InterfaceAudience.Private<a name="line.52"></a>
-<span class="sourceLineNo">053</span>public class IncrementalBackupManager extends BackupManager {<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  public static final Logger LOG = LoggerFactory.getLogger(IncrementalBackupManager.class);<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public IncrementalBackupManager(Connection conn, Configuration conf) throws IOException {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    super(conn, conf);<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  /**<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * Obtain the list of logs that need to be copied out for this incremental backup. The list is set<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * in BackupInfo.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * @return The new HashMap of RS log time stamps after the log roll for this incremental backup.<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @throws IOException exception<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public HashMap&lt;String, Long&gt; getIncrBackupLogFileMap() throws IOException {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    List&lt;String&gt; logList;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>    String savedStartCode = readBackupStartCode();<a name="line.71"></a>
-<span class="sourceLineNo">072</span><a name="line.72"></a>
-<span class="sourceLineNo">073</span>    // key: tableName<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.77"></a>
-<span class="sourceLineNo">078</span><a name="line.78"></a>
-<span class="sourceLineNo">079</span>    if (LOG.isDebugEnabled()) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.83"></a>
-<span class="sourceLineNo">084</span>        || previousTimestampMins.isEmpty()) {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      throw new IOException(<a name="line.85"></a>
-<span class="sourceLineNo">086</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.86"></a>
-<span class="sourceLineNo">087</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    }<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    LOG.info("Execute roll log procedure for incremental backup ...");<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    HashMap&lt;String, String&gt; props = new HashMap&lt;&gt;();<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    props.put("backupRoot", backupInfo.getBackupRootDir());<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>    try (Admin admin = conn.getAdmin()) {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.101"></a>
-<span class="sourceLineNo">102</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.102"></a>
-<span class="sourceLineNo">103</span>            .getBackupRootDir());<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>    return newTimestamps;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /**<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Get list of WAL files eligible for incremental backup.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   *<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   * @return list of WAL files<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @throws IOException if getting the list of WAL files fails<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public List&lt;String&gt; getIncrBackupLogFileList() throws IOException {<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    List&lt;String&gt; logList;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    String savedStartCode = readBackupStartCode();<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // key: tableName<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>    if (LOG.isDebugEnabled()) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        || previousTimestampMins.isEmpty()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      throw new IOException(<a name="line.135"></a>
-<span class="sourceLineNo">136</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.136"></a>
-<span class="sourceLineNo">137</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            .getBackupRootDir());<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    return logList;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.slf4j.Logger;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.LoggerFactory;<a name="line.46"></a>
+<span class="sourceLineNo">047</span><a name="line.47"></a>
+<span class="sourceLineNo">048</span>/**<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * After a full backup was created, the incremental backup will only store the changes made after<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * the last full or incremental backup. Creating the backup copies the logfiles in .logs and<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * .oldlogs since the last backup timestamp.<a name="line.51"></a>
+<span class="sourceLineNo">052</span> */<a name="line.52"></a>
+<span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
+<span class="sourceLineNo">054</span>public class IncrementalBackupManager extends BackupManager {<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  public static final Logger LOG = LoggerFactory.getLogger(IncrementalBackupManager.class);<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  public IncrementalBackupManager(Connection conn, Configuration conf) throws IOException {<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    super(conn, conf);<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  }<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  /**<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   * Obtain the list of logs that need to be copied out for this incremental backup. The list is set<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * in BackupInfo.<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * @return The new HashMap of RS log time stamps after the log roll for this incremental backup.<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @throws IOException exception<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  public HashMap&lt;String, Long&gt; getIncrBackupLogFileMap() throws IOException {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    List&lt;String&gt; logList;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>    String savedStartCode = readBackupStartCode();<a name="line.72"></a>
+<span class="sourceLineNo">073</span><a name="line.73"></a>
+<span class="sourceLineNo">074</span>    // key: tableName<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>    if (LOG.isDebugEnabled()) {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    }<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.84"></a>
+<span class="sourceLineNo">085</span>        || previousTimestampMins.isEmpty()) {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      throw new IOException(<a name="line.86"></a>
+<span class="sourceLineNo">087</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.87"></a>
+<span class="sourceLineNo">088</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>    LOG.info("Execute roll log procedure for incremental backup ...");<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    HashMap&lt;String, String&gt; props = new HashMap&lt;&gt;();<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    props.put("backupRoot", backupInfo.getBackupRootDir());<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>    try (Admin admin = conn.getAdmin()) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.102"></a>
+<span class="sourceLineNo">103</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.103"></a>
+<span class="sourceLineNo">104</span>            .getBackupRootDir());<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    logList = excludeAlreadyBackedUpAndProcV2WALs(logList, logFromSystemTable);<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>    return newTimestamps;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Get list of WAL files eligible for incremental backup.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   *<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @return list of WAL files<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * @throws IOException if getting the list of WAL files fails<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public List&lt;String&gt; getIncrBackupLogFileList() throws IOException {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    List&lt;String&gt; logList;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    HashMap&lt;String, Long&gt; newTimestamps;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    HashMap&lt;String, Long&gt; previousTimestampMins;<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>    String savedStartCode = readBackupStartCode();<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // key: tableName<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // value: &lt;RegionServer,PreviousTimeStamp&gt;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    HashMap&lt;TableName, HashMap&lt;String, Long&gt;&gt; previousTimestampMap = readLogTimestampMap();<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>    if (LOG.isDebugEnabled()) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    // get all new log files from .logs and .oldlogs after last TS and before new timestamp<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    if (savedStartCode == null || previousTimestampMins == null<a name="line.134"></a>
+<span class="sourceLineNo">135</span>        || previousTimestampMins.isEmpty()) {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      throw new IOException(<a name="line.136"></a>
+<span class="sourceLineNo">137</span>          "Cannot read any previous back up timestamps from backup system table. "<a name="line.137"></a>
+<span class="sourceLineNo">138</span>              + "In order to create an incremental backup, at least one full backup is needed.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    newTimestamps = readRegionServerLastLogRollResult();<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    List&lt;WALItem&gt; logFromSystemTable =<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()<a name="line.145"></a>
+<span class="sourceLineNo">146</span>            .getBackupRootDir());<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>    logList = excludeAlreadyBackedUpAndProcV2WALs(logList, logFromSystemTable);<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    backupInfo.setIncrBackupFileList(logList);<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    return logList;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  private List&lt;String&gt; excludeAlreadyBackedUpWALs(List&lt;String&gt; logList,<a name="line.154"></a>
+<span class="sourceLineNo">154</span>  private List&lt;String&gt; excludeAlreadyBackedUpAndProcV2WALs(List&lt;String&gt; logList,<a name="line.154"></a>
 <span class="sourceLineNo">155</span>      List&lt;WALItem&gt; logFromSystemTable) {<a name="line.155"></a>
 <span class="sourceLineNo">156</span>    Set&lt;String&gt; walFileNameSet = convertToSet(logFromSystemTable);<a name="line.156"></a>
 <span class="sourceLineNo">157</span><a name="line.157"></a>
@@ -168,7 +168,7 @@
 <span class="sourceLineNo">160</span>      Path p = new Path(logList.get(i));<a name="line.160"></a>
 <span class="sourceLineNo">161</span>      String name  = p.getName();<a name="line.161"></a>
 <span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>      if (walFileNameSet.contains(name)) {<a name="line.163"></a>
+<span class="sourceLineNo">163</span>      if (walFileNameSet.contains(name) || name.startsWith(WALProcedureStore.LOG_PREFIX)) {<a name="line.163"></a>
 <span class="sourceLineNo">164</span>        continue;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>      }<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
index 1c79f3b..4530d16 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
@@ -105,51 +105,50 @@
 <span class="sourceLineNo">097</span>        LOG.warn("Backup system table is not available: {}", tnfe.getMessage());<a name="line.97"></a>
 <span class="sourceLineNo">098</span>        return files;<a name="line.98"></a>
 <span class="sourceLineNo">099</span>      }<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>      List&lt;FileStatus&gt; list = new ArrayList&lt;&gt;();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      Map&lt;FileStatus, Boolean&gt; walFilesDeletableMap = table.areWALFilesDeletable(files);<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      for (Map.Entry&lt;FileStatus, Boolean&gt; entry: walFilesDeletableMap.entrySet()) {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        FileStatus file = entry.getKey();<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        String wal = file.getPath().toString();<a name="line.105"></a>
-<span class="sourceLineNo">106</span>        boolean deletable = entry.getValue();<a name="line.106"></a>
-<span class="sourceLineNo">107</span>        if (deletable) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          LOG.debug("Found log file in backup system table, deleting: {}", wal);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>          list.add(file);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>        } else {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          LOG.debug("Did not find this log in backup system table, keeping: {}", wal);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        }<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      }<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      return list;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    } catch (IOException e) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      LOG.error("Failed to get backup system table table, therefore will keep all files", e);<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      // nothing to delete<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      return Collections.emptyList();<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  @Override<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  public void setConf(Configuration config) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    // If backup is disabled, keep all members null<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    super.setConf(config);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    if (!config.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY,<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      BackupRestoreConstants.BACKUP_ENABLE_DEFAULT)) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      LOG.warn("Backup is disabled - allowing all wals to be deleted");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>  @Override<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  public void stop(String why) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    if (!this.stopped) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.stopped = true;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      LOG.info("Stopping BackupLogCleaner");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    }<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  @Override<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  public boolean isStopped() {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    return this.stopped;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  }<a name="line.143"></a>
-<span class="sourceLineNo">144</span>}<a name="line.144"></a>
+<span class="sourceLineNo">100</span>      List&lt;FileStatus&gt; list = new ArrayList&lt;&gt;();<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      Map&lt;FileStatus, Boolean&gt; walFilesDeletableMap = table.areWALFilesDeletable(files);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      for (Map.Entry&lt;FileStatus, Boolean&gt; entry: walFilesDeletableMap.entrySet()) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>        FileStatus file = entry.getKey();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>        String wal = file.getPath().toString();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        boolean deletable = entry.getValue();<a name="line.105"></a>
+<span class="sourceLineNo">106</span>        if (deletable) {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          LOG.debug("Found log file in backup system table, deleting: {}", wal);<a name="line.107"></a>
+<span class="sourceLineNo">108</span>          list.add(file);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>        } else {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>          LOG.debug("Did not find this log in backup system table, keeping: {}", wal);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>        }<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      }<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      return list;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    } catch (IOException e) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      LOG.error("Failed to get backup system table table, therefore will keep all files", e);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>      // nothing to delete<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      return Collections.emptyList();<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  @Override<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public void setConf(Configuration config) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    // If backup is disabled, keep all members null<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    super.setConf(config);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    if (!config.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY,<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      BackupRestoreConstants.BACKUP_ENABLE_DEFAULT)) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      LOG.warn("Backup is disabled - allowing all wals to be deleted");<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  }<a name="line.129"></a>
+<span class="sourceLineNo">130</span><a name="line.130"></a>
+<span class="sourceLineNo">131</span>  @Override<a name="line.131"></a>
+<span class="sourceLineNo">132</span>  public void stop(String why) {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    if (!this.stopped) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>      this.stopped = true;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      LOG.info("Stopping BackupLogCleaner");<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  @Override<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public boolean isStopped() {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    return this.stopped;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
+<span class="sourceLineNo">143</span>}<a name="line.143"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
index 4160a88..ad93f10 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
@@ -960,13 +960,13 @@
 <span class="sourceLineNo">952</span>                           Throwable error, long backOffTime, boolean willRetry, String startTime,<a name="line.952"></a>
 <span class="sourceLineNo">953</span>                           int failed, int stopped) {<a name="line.953"></a>
 <span class="sourceLineNo">954</span>    StringBuilder sb = new StringBuilder();<a name="line.954"></a>
-<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).append(", ")<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        .append("attempt=").append(numAttempt)<a name="line.956"></a>
-<span class="sourceLineNo">957</span>        .append("/").append(asyncProcess.numTries).append(", ");<a name="line.957"></a>
+<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        append(", attempt=").append(numAttempt).append("/").append(asyncProcess.numTries).<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        append(", ");<a name="line.957"></a>
 <span class="sourceLineNo">958</span><a name="line.958"></a>
 <span class="sourceLineNo">959</span>    if (failureCount &gt; 0 || error != null){<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      sb.append("failed=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
-<span class="sourceLineNo">961</span>          append(error == null ? "null" : error);<a name="line.961"></a>
+<span class="sourceLineNo">960</span>      sb.append("failureCount=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
+<span class="sourceLineNo">961</span>          append(error);<a name="line.961"></a>
 <span class="sourceLineNo">962</span>    } else {<a name="line.962"></a>
 <span class="sourceLineNo">963</span>      sb.append("succeeded");<a name="line.963"></a>
 <span class="sourceLineNo">964</span>    }<a name="line.964"></a>
@@ -975,15 +975,15 @@
 <span class="sourceLineNo">967</span><a name="line.967"></a>
 <span class="sourceLineNo">968</span>    if (willRetry) {<a name="line.968"></a>
 <span class="sourceLineNo">969</span>      sb.append(", retrying after=").append(backOffTime).append("ms").<a name="line.969"></a>
-<span class="sourceLineNo">970</span>          append(", replay=").append(replaySize).append("ops");<a name="line.970"></a>
+<span class="sourceLineNo">970</span>          append(", operationsToReplay=").append(replaySize);<a name="line.970"></a>
 <span class="sourceLineNo">971</span>    } else if (failureCount &gt; 0) {<a name="line.971"></a>
 <span class="sourceLineNo">972</span>      if (stopped &gt; 0) {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        sb.append("; not retrying ").append(stopped).append(" due to success from other replica");<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      if (failed &gt; 0) {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>        sb.append("; not retrying ").append(failed).append(" - final failure");<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      }<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
+<span class="sourceLineNo">973</span>        sb.append("; NOT retrying, stopped=").append(stopped).<a name="line.973"></a>
+<span class="sourceLineNo">974</span>            append(" because successful operation on other replica");<a name="line.974"></a>
+<span class="sourceLineNo">975</span>      }<a name="line.975"></a>
+<span class="sourceLineNo">976</span>      if (failed &gt; 0) {<a name="line.976"></a>
+<span class="sourceLineNo">977</span>        sb.append("; NOT retrying, failed=").append(failed).append(" -- final attempt!");<a name="line.977"></a>
+<span class="sourceLineNo">978</span>      }<a name="line.978"></a>
 <span class="sourceLineNo">979</span>    }<a name="line.979"></a>
 <span class="sourceLineNo">980</span><a name="line.980"></a>
 <span class="sourceLineNo">981</span>    return sb.toString();<a name="line.981"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
index 4160a88..ad93f10 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
@@ -960,13 +960,13 @@
 <span class="sourceLineNo">952</span>                           Throwable error, long backOffTime, boolean willRetry, String startTime,<a name="line.952"></a>
 <span class="sourceLineNo">953</span>                           int failed, int stopped) {<a name="line.953"></a>
 <span class="sourceLineNo">954</span>    StringBuilder sb = new StringBuilder();<a name="line.954"></a>
-<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).append(", ")<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        .append("attempt=").append(numAttempt)<a name="line.956"></a>
-<span class="sourceLineNo">957</span>        .append("/").append(asyncProcess.numTries).append(", ");<a name="line.957"></a>
+<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        append(", attempt=").append(numAttempt).append("/").append(asyncProcess.numTries).<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        append(", ");<a name="line.957"></a>
 <span class="sourceLineNo">958</span><a name="line.958"></a>
 <span class="sourceLineNo">959</span>    if (failureCount &gt; 0 || error != null){<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      sb.append("failed=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
-<span class="sourceLineNo">961</span>          append(error == null ? "null" : error);<a name="line.961"></a>
+<span class="sourceLineNo">960</span>      sb.append("failureCount=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
+<span class="sourceLineNo">961</span>          append(error);<a name="line.961"></a>
 <span class="sourceLineNo">962</span>    } else {<a name="line.962"></a>
 <span class="sourceLineNo">963</span>      sb.append("succeeded");<a name="line.963"></a>
 <span class="sourceLineNo">964</span>    }<a name="line.964"></a>
@@ -975,15 +975,15 @@
 <span class="sourceLineNo">967</span><a name="line.967"></a>
 <span class="sourceLineNo">968</span>    if (willRetry) {<a name="line.968"></a>
 <span class="sourceLineNo">969</span>      sb.append(", retrying after=").append(backOffTime).append("ms").<a name="line.969"></a>
-<span class="sourceLineNo">970</span>          append(", replay=").append(replaySize).append("ops");<a name="line.970"></a>
+<span class="sourceLineNo">970</span>          append(", operationsToReplay=").append(replaySize);<a name="line.970"></a>
 <span class="sourceLineNo">971</span>    } else if (failureCount &gt; 0) {<a name="line.971"></a>
 <span class="sourceLineNo">972</span>      if (stopped &gt; 0) {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        sb.append("; not retrying ").append(stopped).append(" due to success from other replica");<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      if (failed &gt; 0) {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>        sb.append("; not retrying ").append(failed).append(" - final failure");<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      }<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
+<span class="sourceLineNo">973</span>        sb.append("; NOT retrying, stopped=").append(stopped).<a name="line.973"></a>
+<span class="sourceLineNo">974</span>            append(" because successful operation on other replica");<a name="line.974"></a>
+<span class="sourceLineNo">975</span>      }<a name="line.975"></a>
+<span class="sourceLineNo">976</span>      if (failed &gt; 0) {<a name="line.976"></a>
+<span class="sourceLineNo">977</span>        sb.append("; NOT retrying, failed=").append(failed).append(" -- final attempt!");<a name="line.977"></a>
+<span class="sourceLineNo">978</span>      }<a name="line.978"></a>
 <span class="sourceLineNo">979</span>    }<a name="line.979"></a>
 <span class="sourceLineNo">980</span><a name="line.980"></a>
 <span class="sourceLineNo">981</span>    return sb.toString();<a name="line.981"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
index 4160a88..ad93f10 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
@@ -960,13 +960,13 @@
 <span class="sourceLineNo">952</span>                           Throwable error, long backOffTime, boolean willRetry, String startTime,<a name="line.952"></a>
 <span class="sourceLineNo">953</span>                           int failed, int stopped) {<a name="line.953"></a>
 <span class="sourceLineNo">954</span>    StringBuilder sb = new StringBuilder();<a name="line.954"></a>
-<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).append(", ")<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        .append("attempt=").append(numAttempt)<a name="line.956"></a>
-<span class="sourceLineNo">957</span>        .append("/").append(asyncProcess.numTries).append(", ");<a name="line.957"></a>
+<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        append(", attempt=").append(numAttempt).append("/").append(asyncProcess.numTries).<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        append(", ");<a name="line.957"></a>
 <span class="sourceLineNo">958</span><a name="line.958"></a>
 <span class="sourceLineNo">959</span>    if (failureCount &gt; 0 || error != null){<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      sb.append("failed=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
-<span class="sourceLineNo">961</span>          append(error == null ? "null" : error);<a name="line.961"></a>
+<span class="sourceLineNo">960</span>      sb.append("failureCount=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
+<span class="sourceLineNo">961</span>          append(error);<a name="line.961"></a>
 <span class="sourceLineNo">962</span>    } else {<a name="line.962"></a>
 <span class="sourceLineNo">963</span>      sb.append("succeeded");<a name="line.963"></a>
 <span class="sourceLineNo">964</span>    }<a name="line.964"></a>
@@ -975,15 +975,15 @@
 <span class="sourceLineNo">967</span><a name="line.967"></a>
 <span class="sourceLineNo">968</span>    if (willRetry) {<a name="line.968"></a>
 <span class="sourceLineNo">969</span>      sb.append(", retrying after=").append(backOffTime).append("ms").<a name="line.969"></a>
-<span class="sourceLineNo">970</span>          append(", replay=").append(replaySize).append("ops");<a name="line.970"></a>
+<span class="sourceLineNo">970</span>          append(", operationsToReplay=").append(replaySize);<a name="line.970"></a>
 <span class="sourceLineNo">971</span>    } else if (failureCount &gt; 0) {<a name="line.971"></a>
 <span class="sourceLineNo">972</span>      if (stopped &gt; 0) {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        sb.append("; not retrying ").append(stopped).append(" due to success from other replica");<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      if (failed &gt; 0) {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>        sb.append("; not retrying ").append(failed).append(" - final failure");<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      }<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
+<span class="sourceLineNo">973</span>        sb.append("; NOT retrying, stopped=").append(stopped).<a name="line.973"></a>
+<span class="sourceLineNo">974</span>            append(" because successful operation on other replica");<a name="line.974"></a>
+<span class="sourceLineNo">975</span>      }<a name="line.975"></a>
+<span class="sourceLineNo">976</span>      if (failed &gt; 0) {<a name="line.976"></a>
+<span class="sourceLineNo">977</span>        sb.append("; NOT retrying, failed=").append(failed).append(" -- final attempt!");<a name="line.977"></a>
+<span class="sourceLineNo">978</span>      }<a name="line.978"></a>
 <span class="sourceLineNo">979</span>    }<a name="line.979"></a>
 <span class="sourceLineNo">980</span><a name="line.980"></a>
 <span class="sourceLineNo">981</span>    return sb.toString();<a name="line.981"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
index 4160a88..ad93f10 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
@@ -960,13 +960,13 @@
 <span class="sourceLineNo">952</span>                           Throwable error, long backOffTime, boolean willRetry, String startTime,<a name="line.952"></a>
 <span class="sourceLineNo">953</span>                           int failed, int stopped) {<a name="line.953"></a>
 <span class="sourceLineNo">954</span>    StringBuilder sb = new StringBuilder();<a name="line.954"></a>
-<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).append(", ")<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        .append("attempt=").append(numAttempt)<a name="line.956"></a>
-<span class="sourceLineNo">957</span>        .append("/").append(asyncProcess.numTries).append(", ");<a name="line.957"></a>
+<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        append(", attempt=").append(numAttempt).append("/").append(asyncProcess.numTries).<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        append(", ");<a name="line.957"></a>
 <span class="sourceLineNo">958</span><a name="line.958"></a>
 <span class="sourceLineNo">959</span>    if (failureCount &gt; 0 || error != null){<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      sb.append("failed=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
-<span class="sourceLineNo">961</span>          append(error == null ? "null" : error);<a name="line.961"></a>
+<span class="sourceLineNo">960</span>      sb.append("failureCount=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
+<span class="sourceLineNo">961</span>          append(error);<a name="line.961"></a>
 <span class="sourceLineNo">962</span>    } else {<a name="line.962"></a>
 <span class="sourceLineNo">963</span>      sb.append("succeeded");<a name="line.963"></a>
 <span class="sourceLineNo">964</span>    }<a name="line.964"></a>
@@ -975,15 +975,15 @@
 <span class="sourceLineNo">967</span><a name="line.967"></a>
 <span class="sourceLineNo">968</span>    if (willRetry) {<a name="line.968"></a>
 <span class="sourceLineNo">969</span>      sb.append(", retrying after=").append(backOffTime).append("ms").<a name="line.969"></a>
-<span class="sourceLineNo">970</span>          append(", replay=").append(replaySize).append("ops");<a name="line.970"></a>
+<span class="sourceLineNo">970</span>          append(", operationsToReplay=").append(replaySize);<a name="line.970"></a>
 <span class="sourceLineNo">971</span>    } else if (failureCount &gt; 0) {<a name="line.971"></a>
 <span class="sourceLineNo">972</span>      if (stopped &gt; 0) {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        sb.append("; not retrying ").append(stopped).append(" due to success from other replica");<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      if (failed &gt; 0) {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>        sb.append("; not retrying ").append(failed).append(" - final failure");<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      }<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
+<span class="sourceLineNo">973</span>        sb.append("; NOT retrying, stopped=").append(stopped).<a name="line.973"></a>
+<span class="sourceLineNo">974</span>            append(" because successful operation on other replica");<a name="line.974"></a>
+<span class="sourceLineNo">975</span>      }<a name="line.975"></a>
+<span class="sourceLineNo">976</span>      if (failed &gt; 0) {<a name="line.976"></a>
+<span class="sourceLineNo">977</span>        sb.append("; NOT retrying, failed=").append(failed).append(" -- final attempt!");<a name="line.977"></a>
+<span class="sourceLineNo">978</span>      }<a name="line.978"></a>
 <span class="sourceLineNo">979</span>    }<a name="line.979"></a>
 <span class="sourceLineNo">980</span><a name="line.980"></a>
 <span class="sourceLineNo">981</span>    return sb.toString();<a name="line.981"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
index 4160a88..ad93f10 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
@@ -960,13 +960,13 @@
 <span class="sourceLineNo">952</span>                           Throwable error, long backOffTime, boolean willRetry, String startTime,<a name="line.952"></a>
 <span class="sourceLineNo">953</span>                           int failed, int stopped) {<a name="line.953"></a>
 <span class="sourceLineNo">954</span>    StringBuilder sb = new StringBuilder();<a name="line.954"></a>
-<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).append(", ")<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        .append("attempt=").append(numAttempt)<a name="line.956"></a>
-<span class="sourceLineNo">957</span>        .append("/").append(asyncProcess.numTries).append(", ");<a name="line.957"></a>
+<span class="sourceLineNo">955</span>    sb.append("id=").append(asyncProcess.id).append(", table=").append(tableName).<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        append(", attempt=").append(numAttempt).append("/").append(asyncProcess.numTries).<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        append(", ");<a name="line.957"></a>
 <span class="sourceLineNo">958</span><a name="line.958"></a>
 <span class="sourceLineNo">959</span>    if (failureCount &gt; 0 || error != null){<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      sb.append("failed=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
-<span class="sourceLineNo">961</span>          append(error == null ? "null" : error);<a name="line.961"></a>
+<span class="sourceLineNo">960</span>      sb.append("failureCount=").append(failureCount).append("ops").append(", last exception=").<a name="line.960"></a>
+<span class="sourceLineNo">961</span>          append(error);<a name="line.961"></a>
 <span class="sourceLineNo">962</span>    } else {<a name="line.962"></a>
 <span class="sourceLineNo">963</span>      sb.append("succeeded");<a name="line.963"></a>
 <span class="sourceLineNo">964</span>    }<a name="line.964"></a>
@@ -975,15 +975,15 @@
 <span class="sourceLineNo">967</span><a name="line.967"></a>
 <span class="sourceLineNo">968</span>    if (willRetry) {<a name="line.968"></a>
 <span class="sourceLineNo">969</span>      sb.append(", retrying after=").append(backOffTime).append("ms").<a name="line.969"></a>
-<span class="sourceLineNo">970</span>          append(", replay=").append(replaySize).append("ops");<a name="line.970"></a>
+<span class="sourceLineNo">970</span>          append(", operationsToReplay=").append(replaySize);<a name="line.970"></a>
 <span class="sourceLineNo">971</span>    } else if (failureCount &gt; 0) {<a name="line.971"></a>
 <span class="sourceLineNo">972</span>      if (stopped &gt; 0) {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        sb.append("; not retrying ").append(stopped).append(" due to success from other replica");<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      if (failed &gt; 0) {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>        sb.append("; not retrying ").append(failed).append(" - final failure");<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      }<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
+<span class="sourceLineNo">973</span>        sb.append("; NOT retrying, stopped=").append(stopped).<a name="line.973"></a>
+<span class="sourceLineNo">974</span>            append(" because successful operation on other replica");<a name="line.974"></a>
+<span class="sourceLineNo">975</span>      }<a name="line.975"></a>
+<span class="sourceLineNo">976</span>      if (failed &gt; 0) {<a name="line.976"></a>
+<span class="sourceLineNo">977</span>        sb.append("; NOT retrying, failed=").append(failed).append(" -- final attempt!");<a name="line.977"></a>
+<span class="sourceLineNo">978</span>      }<a name="line.978"></a>
 <span class="sourceLineNo">979</span>    }<a name="line.979"></a>
 <span class="sourceLineNo">980</span><a name="line.980"></a>
 <span class="sourceLineNo">981</span>    return sb.toString();<a name="line.981"></a>


[30/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
new file mode 100644
index 0000000..3d1edb3
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
@@ -0,0 +1,2186 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html lang="en">
+<head>
+<title>Source code</title>
+<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
+</head>
+<body>
+<div class="sourceContainer">
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
+<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
+<span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
+<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
+<span class="sourceLineNo">005</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.5"></a>
+<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a>
+<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a>
+<span class="sourceLineNo">008</span> * with the License.  You may obtain a copy of the License at<a name="line.8"></a>
+<span class="sourceLineNo">009</span> *<a name="line.9"></a>
+<span class="sourceLineNo">010</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a>
+<span class="sourceLineNo">011</span> *<a name="line.11"></a>
+<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a>
+<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a>
+<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a>
+<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a>
+<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a>
+<span class="sourceLineNo">017</span> */<a name="line.17"></a>
+<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.io.hfile;<a name="line.18"></a>
+<span class="sourceLineNo">019</span><a name="line.19"></a>
+<span class="sourceLineNo">020</span>import java.io.DataInputStream;<a name="line.20"></a>
+<span class="sourceLineNo">021</span>import java.io.DataOutput;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.DataOutputStream;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.io.InputStream;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.nio.ByteBuffer;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.util.concurrent.locks.Lock;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.28"></a>
+<span class="sourceLineNo">029</span><a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.Path;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.Cell;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.HConstants;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.slf4j.Logger;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.slf4j.LoggerFactory;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.ByteBufferWriterDataOutputStream;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.io.IOUtils;<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>/**<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * Cacheable Blocks of an {@link HFile} version 2 file.<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * Version 2 was introduced in hbase-0.92.0.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> *<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * &lt;p&gt;Version 1 was the original file block. Version 2 was introduced when we changed the hbase file<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * format to support multi-level block indexes and compound bloom filters (HBASE-3857). Support<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * for Version 1 was removed in hbase-1.3.0.<a name="line.65"></a>
+<span class="sourceLineNo">066</span> *<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * &lt;h3&gt;HFileBlock: Version 2&lt;/h3&gt;<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * In version 2, a block is structured as follows:<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * &lt;ul&gt;<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * &lt;li&gt;&lt;b&gt;Header:&lt;/b&gt; See Writer#putHeader() for where header is written; header total size is<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * HFILEBLOCK_HEADER_SIZE<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * &lt;ul&gt;<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * &lt;li&gt;0. blockType: Magic record identifying the {@link BlockType} (8 bytes):<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * e.g. &lt;code&gt;DATABLK*&lt;/code&gt;<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * &lt;li&gt;1. onDiskSizeWithoutHeader: Compressed -- a.k.a 'on disk' -- block size, excluding header,<a name="line.75"></a>
+<span class="sourceLineNo">076</span> * but including tailing checksum bytes (4 bytes)<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * &lt;li&gt;2. uncompressedSizeWithoutHeader: Uncompressed block size, excluding header, and excluding<a name="line.77"></a>
+<span class="sourceLineNo">078</span> * checksum bytes (4 bytes)<a name="line.78"></a>
+<span class="sourceLineNo">079</span> * &lt;li&gt;3. prevBlockOffset: The offset of the previous block of the same type (8 bytes). This is<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * used to navigate to the previous block without having to go to the block index<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * &lt;li&gt;4: For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * &lt;li&gt;5: For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.82"></a>
+<span class="sourceLineNo">083</span> * &lt;li&gt;6: onDiskDataSizeWithHeader: For minorVersions &amp;gt;=1, the size of data 'on disk', including<a name="line.83"></a>
+<span class="sourceLineNo">084</span> * header, excluding checksums (4 bytes)<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * &lt;/ul&gt;<a name="line.85"></a>
+<span class="sourceLineNo">086</span> * &lt;/li&gt;<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * &lt;li&gt;&lt;b&gt;Raw/Compressed/Encrypted/Encoded data:&lt;/b&gt; The compression<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * algorithm is the same for all the blocks in an {@link HFile}. If compression is NONE, this is<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * just raw, serialized Cells.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> * &lt;li&gt;&lt;b&gt;Tail:&lt;/b&gt; For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.90"></a>
+<span class="sourceLineNo">091</span> * the number of bytes specified by bytesPerChecksum.<a name="line.91"></a>
+<span class="sourceLineNo">092</span> * &lt;/ul&gt;<a name="line.92"></a>
+<span class="sourceLineNo">093</span> *<a name="line.93"></a>
+<span class="sourceLineNo">094</span> * &lt;h3&gt;Caching&lt;/h3&gt;<a name="line.94"></a>
+<span class="sourceLineNo">095</span> * Caches cache whole blocks with trailing checksums if any. We then tag on some metadata, the<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * content of BLOCK_METADATA_SPACE which will be flag on if we are doing 'hbase'<a name="line.96"></a>
+<span class="sourceLineNo">097</span> * checksums and then the offset into the file which is needed when we re-make a cache key<a name="line.97"></a>
+<span class="sourceLineNo">098</span> * when we return the block to the cache as 'done'.<a name="line.98"></a>
+<span class="sourceLineNo">099</span> * See {@link Cacheable#serialize(ByteBuffer, boolean)} and {@link Cacheable#getDeserializer()}.<a name="line.99"></a>
+<span class="sourceLineNo">100</span> *<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * &lt;p&gt;TODO: Should we cache the checksums? Down in Writer#getBlockForCaching(CacheConfig) where<a name="line.101"></a>
+<span class="sourceLineNo">102</span> * we make a block to cache-on-write, there is an attempt at turning off checksums. This is not the<a name="line.102"></a>
+<span class="sourceLineNo">103</span> * only place we get blocks to cache. We also will cache the raw return from an hdfs read. In this<a name="line.103"></a>
+<span class="sourceLineNo">104</span> * case, the checksums may be present. If the cache is backed by something that doesn't do ECC,<a name="line.104"></a>
+<span class="sourceLineNo">105</span> * say an SSD, we might want to preserve checksums. For now this is open question.<a name="line.105"></a>
+<span class="sourceLineNo">106</span> * &lt;p&gt;TODO: Over in BucketCache, we save a block allocation by doing a custom serialization.<a name="line.106"></a>
+<span class="sourceLineNo">107</span> * Be sure to change it if serialization changes in here. Could we add a method here that takes an<a name="line.107"></a>
+<span class="sourceLineNo">108</span> * IOEngine and that then serializes to it rather than expose our internals over in BucketCache?<a name="line.108"></a>
+<span class="sourceLineNo">109</span> * IOEngine is in the bucket subpackage. Pull it up? Then this class knows about bucketcache. Ugh.<a name="line.109"></a>
+<span class="sourceLineNo">110</span> */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>@InterfaceAudience.Private<a name="line.111"></a>
+<span class="sourceLineNo">112</span>public class HFileBlock implements Cacheable {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);<a name="line.113"></a>
+<span class="sourceLineNo">114</span><a name="line.114"></a>
+<span class="sourceLineNo">115</span>  // Block Header fields.<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>  // TODO: encapsulate Header related logic in this inner class.<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  static class Header {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    // Format of header is:<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    // 8 bytes - block magic<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    // 4 bytes int - onDiskSizeWithoutHeader<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    // 4 bytes int - uncompressedSizeWithoutHeader<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    // 8 bytes long - prevBlockOffset<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // The following 3 are only present if header contains checksum information<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // 1 byte - checksum type<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    // 4 byte int - bytes per checksum<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    // 4 byte int - onDiskDataSizeWithHeader<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    static int BLOCK_MAGIC_INDEX = 0;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    static int ON_DISK_SIZE_WITHOUT_HEADER_INDEX = 8;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static int UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX = 12;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static int PREV_BLOCK_OFFSET_INDEX = 16;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    static int CHECKSUM_TYPE_INDEX = 24;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static int BYTES_PER_CHECKSUM_INDEX = 25;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    static int ON_DISK_DATA_SIZE_WITH_HEADER_INDEX = 29;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>  /** Type of block. Header field 0. */<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  private BlockType blockType;<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * Size on disk excluding header, including checksum. Header field 1.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   */<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  private int onDiskSizeWithoutHeader;<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  /**<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * Size of pure data. Does not include header or checksums. Header field 2.<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   */<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  private int uncompressedSizeWithoutHeader;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>  /**<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * The offset of the previous block on disk. Header field 3.<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   */<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  private long prevBlockOffset;<a name="line.156"></a>
+<span class="sourceLineNo">157</span><a name="line.157"></a>
+<span class="sourceLineNo">158</span>  /**<a name="line.158"></a>
+<span class="sourceLineNo">159</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.159"></a>
+<span class="sourceLineNo">160</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.160"></a>
+<span class="sourceLineNo">161</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.161"></a>
+<span class="sourceLineNo">162</span>   */<a name="line.162"></a>
+<span class="sourceLineNo">163</span>  private int onDiskDataSizeWithHeader;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  // End of Block Header fields.<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>  /**<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   * The in-memory representation of the hfile block. Can be on or offheap. Can be backed by<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   * a single ByteBuffer or by many. Make no assumptions.<a name="line.168"></a>
+<span class="sourceLineNo">169</span>   *<a name="line.169"></a>
+<span class="sourceLineNo">170</span>   * &lt;p&gt;Be careful reading from this &lt;code&gt;buf&lt;/code&gt;. Duplicate and work on the duplicate or if<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   * not, be sure to reset position and limit else trouble down the road.<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   *<a name="line.172"></a>
+<span class="sourceLineNo">173</span>   * &lt;p&gt;TODO: Make this read-only once made.<a name="line.173"></a>
+<span class="sourceLineNo">174</span>   *<a name="line.174"></a>
+<span class="sourceLineNo">175</span>   * &lt;p&gt;We are using the ByteBuff type. ByteBuffer is not extensible yet we need to be able to have<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * a ByteBuffer-like API across multiple ByteBuffers reading from a cache such as BucketCache.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * So, we have this ByteBuff type. Unfortunately, it is spread all about HFileBlock. Would be<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * good if could be confined to cache-use only but hard-to-do.<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
+<span class="sourceLineNo">180</span>  private ByteBuff buf;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  /** Meta data that holds meta information on the hfileblock.<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   */<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private HFileContext fileContext;<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * The offset of this block in the file. Populated by the reader for<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * convenience of access. This offset is not part of the block header.<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   */<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  private long offset = UNSET;<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.192"></a>
+<span class="sourceLineNo">193</span><a name="line.193"></a>
+<span class="sourceLineNo">194</span>  /**<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * The on-disk size of the next block, including the header and checksums if present.<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * UNSET if unknown.<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   *<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * Blocks try to carry the size of the next block to read in this data member. Usually<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * we get block sizes from the hfile index but sometimes the index is not available:<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * e.g. when we read the indexes themselves (indexes are stored in blocks, we do not<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * have an index for the indexes). Saves seeks especially around file open when<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * there is a flurry of reading in hfile metadata.<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  private int nextBlockOnDiskSize = UNSET;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * On a checksum failure, do these many succeeding read requests using hdfs checksums before<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * auto-reenabling hbase checksum verification.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
+<span class="sourceLineNo">210</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.210"></a>
+<span class="sourceLineNo">211</span><a name="line.211"></a>
+<span class="sourceLineNo">212</span>  private static int UNSET = -1;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>  public static final boolean FILL_HEADER = true;<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.214"></a>
+<span class="sourceLineNo">215</span><a name="line.215"></a>
+<span class="sourceLineNo">216</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>  /**<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * Space for metadata on a block that gets stored along with the block when we cache it.<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * There are a few bytes stuck on the end of the HFileBlock that we pull in from HDFS.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   * 8 bytes are for the offset of this block (long) in the file. Offset is important because is is<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * used when we remake the CacheKey when we return block to the cache when done. There is also<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * a flag on whether checksumming is being done by hbase or not. See class comment for note on<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * uncertain state of checksumming of blocks that come out of cache (should we or should we not?).<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * Finally there are 4 bytes to hold the length of the next block which can save a seek on<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * occasion if available.<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   * (This EXTRA info came in with original commit of the bucketcache, HBASE-7404. It was<a name="line.229"></a>
+<span class="sourceLineNo">230</span>   * formerly known as EXTRA_SERIALIZATION_SPACE).<a name="line.230"></a>
+<span class="sourceLineNo">231</span>   */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  static final int BLOCK_METADATA_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_LONG + Bytes.SIZEOF_INT;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /**<a name="line.234"></a>
+<span class="sourceLineNo">235</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.235"></a>
+<span class="sourceLineNo">236</span>   */<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.237"></a>
+<span class="sourceLineNo">238</span><a name="line.238"></a>
+<span class="sourceLineNo">239</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   * Used deserializing blocks from Cache.<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   *<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * &lt;code&gt;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * ++++++++++++++<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   * + HFileBlock +<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   * ++++++++++++++<a name="line.248"></a>
+<span class="sourceLineNo">249</span>   * + Checksums  + &lt;= Optional<a name="line.249"></a>
+<span class="sourceLineNo">250</span>   * ++++++++++++++<a name="line.250"></a>
+<span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
+<span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
+<span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   */<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   *<a name="line.332"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</span>  /**<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * Returns a buffer that does not include the header or checksum.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   *<a name="line.505"></a>
+<span class="sourceLineNo">506</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.506"></a>
+<span class="sourceLineNo">507</span>   */<a name="line.507"></a>
+<span class="sourceLineNo">508</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>    ByteBuff dup = getBufferReadOnly();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.511"></a>
+<span class="sourceLineNo">512</span>  }<a name="line.512"></a>
+<span class="sourceLineNo">513</span><a name="line.513"></a>
+<span class="sourceLineNo">514</span>  /**<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * and any follow-on checksums if present.<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   *<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * @return the buffer of this block for read-only operations<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   */<a name="line.523"></a>
+<span class="sourceLineNo">524</span>  public ByteBuff getBufferReadOnly() {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    ByteBuff dup = this.buf.duplicate();<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    assert dup.position() == 0;<a name="line.527"></a>
+<span class="sourceLineNo">528</span>    return dup;<a name="line.528"></a>
+<span class="sourceLineNo">529</span>  }<a name="line.529"></a>
+<span class="sourceLineNo">530</span><a name="line.530"></a>
+<span class="sourceLineNo">531</span>  @VisibleForTesting<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      String fieldName) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    if (valueFromBuf != valueFromField) {<a name="line.534"></a>
+<span class="sourceLineNo">535</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.536"></a>
+<span class="sourceLineNo">537</span>    }<a name="line.537"></a>
+<span class="sourceLineNo">538</span>  }<a name="line.538"></a>
+<span class="sourceLineNo">539</span><a name="line.539"></a>
+<span class="sourceLineNo">540</span>  @VisibleForTesting<a name="line.540"></a>
+<span class="sourceLineNo">541</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      throws IOException {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    if (valueFromBuf != valueFromField) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.544"></a>
+<span class="sourceLineNo">545</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * This function is primary for testing and debugging, and is not<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * Used by tests only.<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
+<span class="sourceLineNo">557</span>  @VisibleForTesting<a name="line.557"></a>
+<span class="sourceLineNo">558</span>  void sanityCheck() throws IOException {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    // Duplicate so no side-effects<a name="line.559"></a>
+<span class="sourceLineNo">560</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.560"></a>
+<span class="sourceLineNo">561</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.563"></a>
+<span class="sourceLineNo">564</span><a name="line.564"></a>
+<span class="sourceLineNo">565</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.565"></a>
+<span class="sourceLineNo">566</span>        "uncompressedSizeWithoutHeader");<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.570"></a>
+<span class="sourceLineNo">571</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.571"></a>
+<span class="sourceLineNo">572</span>          "bytesPerChecksum");<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
+<span class="sourceLineNo">575</span><a name="line.575"></a>
+<span class="sourceLineNo">576</span>    int cksumBytes = totalChecksumBytes();<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.577"></a>
+<span class="sourceLineNo">578</span>    if (dup.limit() != expectedBufLimit) {<a name="line.578"></a>
+<span class="sourceLineNo">579</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.579"></a>
+<span class="sourceLineNo">580</span>    }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.582"></a>
+<span class="sourceLineNo">583</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.583"></a>
+<span class="sourceLineNo">584</span>    int hdrSize = headerSize();<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.586"></a>
+<span class="sourceLineNo">587</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.587"></a>
+<span class="sourceLineNo">588</span>    }<a name="line.588"></a>
+<span class="sourceLineNo">589</span>  }<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>  @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>  public String toString() {<a name="line.592"></a>
+<span class="sourceLineNo">593</span>    StringBuilder sb = new StringBuilder()<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      .append("[")<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      .append("blockType=").append(blockType)<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      .append(", fileOffset=").append(offset)<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      .append(", headerSize=").append(headerSize())<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.599"></a>
+<span class="sourceLineNo">600</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    } else {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.607"></a>
+<span class="sourceLineNo">608</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.608"></a>
+<span class="sourceLineNo">609</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>    }<a name="line.610"></a>
+<span class="sourceLineNo">611</span>    String dataBegin = null;<a name="line.611"></a>
+<span class="sourceLineNo">612</span>    if (buf.hasArray()) {<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.613"></a>
+<span class="sourceLineNo">614</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.614"></a>
+<span class="sourceLineNo">615</span>    } else {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.618"></a>
+<span class="sourceLineNo">619</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>    }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.623"></a>
+<span class="sourceLineNo">624</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.624"></a>
+<span class="sourceLineNo">625</span>      .append(", buf=[").append(buf).append("]")<a name="line.625"></a>
+<span class="sourceLineNo">626</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.626"></a>
+<span class="sourceLineNo">627</span>      .append(", fileContext=").append(fileContext)<a name="line.627"></a>
+<span class="sourceLineNo">628</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      .append("]");<a name="line.629"></a>
+<span class="sourceLineNo">630</span>    return sb.toString();<a name="line.630"></a>
+<span class="sourceLineNo">631</span>  }<a name="line.631"></a>
+<span class="sourceLineNo">632</span><a name="line.632"></a>
+<span class="sourceLineNo">633</span>  /**<a name="line.633"></a>
+<span class="sourceLineNo">634</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.634"></a>
+<span class="sourceLineNo">635</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.635"></a>
+<span class="sourceLineNo">636</span>   */<a name="line.636"></a>
+<span class="sourceLineNo">637</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // encryption details.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      return this;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>    }<a name="line.643"></a>
+<span class="sourceLineNo">644</span><a name="line.644"></a>
+<span class="sourceLineNo">645</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.646"></a>
+<span class="sourceLineNo">647</span><a name="line.647"></a>
+<span class="sourceLineNo">648</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    ByteBuff dup = this.buf.duplicate();<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    dup.position(this.headerSize());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>    dup = dup.slice();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      dup);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    return unpacked;<a name="line.657"></a>
+<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
+<span class="sourceLineNo">659</span><a name="line.659"></a>
+<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
+<span class="sourceLineNo">661</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.661"></a>
+<span class="sourceLineNo">662</span>   * from the existing buffer. Does not change header fields.<a name="line.662"></a>
+<span class="sourceLineNo">663</span>   * Reserve room to keep checksum bytes too.<a name="line.663"></a>
+<span class="sourceLineNo">664</span>   */<a name="line.664"></a>
+<span class="sourceLineNo">665</span>  private void allocateBuffer() {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    int cksumBytes = totalChecksumBytes();<a name="line.666"></a>
+<span class="sourceLineNo">667</span>    int headerSize = headerSize();<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.668"></a>
+<span class="sourceLineNo">669</span><a name="line.669"></a>
+<span class="sourceLineNo">670</span>    // TODO we need consider allocating offheap here?<a name="line.670"></a>
+<span class="sourceLineNo">671</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.671"></a>
+<span class="sourceLineNo">672</span><a name="line.672"></a>
+<span class="sourceLineNo">673</span>    // Copy header bytes into newBuf.<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    // newBuf is HBB so no issue in calling array()<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    buf.position(0);<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.676"></a>
+<span class="sourceLineNo">677</span><a name="line.677"></a>
+<span class="sourceLineNo">678</span>    buf = new SingleByteBuff(newBuf);<a name="line.678"></a>
+<span class="sourceLineNo">679</span>    // set limit to exclude next block's header<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.680"></a>
+<span class="sourceLineNo">681</span>  }<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>  /**<a name="line.683"></a>
+<span class="sourceLineNo">684</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.684"></a>
+<span class="sourceLineNo">685</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.685"></a>
+<span class="sourceLineNo">686</span>   */<a name="line.686"></a>
+<span class="sourceLineNo">687</span>  public boolean isUnpacked() {<a name="line.687"></a>
+<span class="sourceLineNo">688</span>    final int cksumBytes = totalChecksumBytes();<a name="line.688"></a>
+<span class="sourceLineNo">689</span>    final int headerSize = headerSize();<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    final int bufCapacity = buf.capacity();<a name="line.691"></a>
+<span class="sourceLineNo">692</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.692"></a>
+<span class="sourceLineNo">693</span>  }<a name="line.693"></a>
+<span class="sourceLineNo">694</span><a name="line.694"></a>
+<span class="sourceLineNo">695</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.695"></a>
+<span class="sourceLineNo">696</span>  @VisibleForTesting<a name="line.696"></a>
+<span class="sourceLineNo">697</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.698"></a>
+<span class="sourceLineNo">699</span>      throw new IOException("Using no compression but "<a name="line.699"></a>
+<span class="sourceLineNo">700</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.700"></a>
+<span class="sourceLineNo">701</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.701"></a>
+<span class="sourceLineNo">702</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.702"></a>
+<span class="sourceLineNo">703</span>    }<a name="line.703"></a>
+<span class="sourceLineNo">704</span>  }<a name="line.704"></a>
+<span class="sourceLineNo">705</span><a name="line.705"></a>
+<span class="sourceLineNo">706</span>  /**<a name="line.706"></a>
+<span class="sourceLineNo">707</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.707"></a>
+<span class="sourceLineNo">708</span>   * block is returned to the cache.<a name="line.708"></a>
+<span class="sourceLineNo">709</span>   * @return the offset of this block in the file it was read from<a name="line.709"></a>
+<span class="sourceLineNo">710</span>   */<a name="line.710"></a>
+<span class="sourceLineNo">711</span>  long getOffset() {<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    if (offset &lt; 0) {<a name="line.712"></a>
+<span class="sourceLineNo">713</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    }<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    return offset;<a name="line.715"></a>
+<span class="sourceLineNo">716</span>  }<a name="line.716"></a>
+<span class="sourceLineNo">717</span><a name="line.717"></a>
+<span class="sourceLineNo">718</span>  /**<a name="line.718"></a>
+<span class="sourceLineNo">719</span>   * @return a byte stream reading the data + checksum of this block<a name="line.719"></a>
+<span class="sourceLineNo">720</span>   */<a name="line.720"></a>
+<span class="sourceLineNo">721</span>  DataInputStream getByteStream() {<a name="line.721"></a>
+<span class="sourceLineNo">722</span>    ByteBuff dup = this.buf.duplicate();<a name="line.722"></a>
+<span class="sourceLineNo">723</span>    dup.position(this.headerSize());<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.724"></a>
+<span class="sourceLineNo">725</span>  }<a name="line.725"></a>
+<span class="sourceLineNo">726</span><a name="line.726"></a>
+<span class="sourceLineNo">727</span>  @Override<a name="line.727"></a>
+<span class="sourceLineNo">728</span>  public long heapSize() {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    long size = ClassSize.align(<a name="line.729"></a>
+<span class="sourceLineNo">730</span>        ClassSize.OBJECT +<a name="line.730"></a>
+<span class="sourceLineNo">731</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.731"></a>
+<span class="sourceLineNo">732</span>        4 * ClassSize.REFERENCE +<a name="line.732"></a>
+<span class="sourceLineNo">733</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.733"></a>
+<span class="sourceLineNo">734</span>        // bytePerChecksum and onDiskDataSize<a name="line.734"></a>
+<span class="sourceLineNo">735</span>        4 * Bytes.SIZEOF_INT +<a name="line.735"></a>
+<span class="sourceLineNo">736</span>        // This and previous block offset<a name="line.736"></a>
+<span class="sourceLineNo">737</span>        2 * Bytes.SIZEOF_LONG +<a name="line.737"></a>
+<span class="sourceLineNo">738</span>        // Heap size of the meta object. meta will be always not null.<a name="line.738"></a>
+<span class="sourceLineNo">739</span>        fileContext.heapSize()<a name="line.739"></a>
+<span class="sourceLineNo">740</span>    );<a name="line.740"></a>
+<span class="sourceLineNo">741</span><a name="line.741"></a>
+<span class="sourceLineNo">742</span>    if (buf != null) {<a name="line.742"></a>
+<span class="sourceLineNo">743</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.743"></a>
+<span class="sourceLineNo">744</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    }<a name="line.745"></a>
+<span class="sourceLineNo">746</span><a name="line.746"></a>
+<span class="sourceLineNo">747</span>    return ClassSize.align(size);<a name="line.747"></a>
+<span class="sourceLineNo">748</span>  }<a name="line.748"></a>
+<span class="sourceLineNo">749</span><a name="line.749"></a>
+<span class="sourceLineNo">750</span>  /**<a name="line.750"></a>
+<span class="sourceLineNo">751</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.751"></a>
+<span class="sourceLineNo">752</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.752"></a>
+<span class="sourceLineNo">753</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.753"></a>
+<span class="sourceLineNo">754</span>   * number of "extra" bytes to also optionally read.<a name="line.754"></a>
+<span class="sourceLineNo">755</span>   *<a name="line.755"></a>
+<span class="sourceLineNo">756</span>   * @param in the input stream to read from<a name="line.756"></a>
+<span class="sourceLineNo">757</span>   * @param buf the buffer to read into<a name="line.757"></a>
+<span class="sourceLineNo">758</span>   * @param bufOffset the destination offset in the buffer<a name="line.758"></a>
+<span class="sourceLineNo">759</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.759"></a>
+<span class="sourceLineNo">760</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.760"></a>
+<span class="sourceLineNo">761</span>   * @return true if succeeded reading the extra bytes<a name="line.761"></a>
+<span class="sourceLineNo">762</span>   * @throws IOException if failed to read the necessary bytes<a name="line.762"></a>
+<span class="sourceLineNo">763</span>   */<a name="line.763"></a>
+<span class="sourceLineNo">764</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.764"></a>
+<span class="sourceLineNo">765</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.765"></a>
+<span class="sourceLineNo">766</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    while (bytesRemaining &gt; 0) {<a name="line.767"></a>
+<span class="sourceLineNo">768</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.768"></a>
+<span class="sourceLineNo">769</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.769"></a>
+<span class="sourceLineNo">770</span>        // We could not read the "extra data", but that is OK.<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        break;<a name="line.771"></a>
+<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
+<span class="sourceLineNo">773</span>      if (ret &lt; 0) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.774"></a>
+<span class="sourceLineNo">775</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.775"></a>
+<span class="sourceLineNo">776</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.776"></a>
+<span class="sourceLineNo">777</span>            + "successfully read "<a name="line.777"></a>
+<span class="sourceLineNo">778</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      }<a name="line.779"></a>
+<span class="sourceLineNo">780</span>      bufOffset += ret;<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      bytesRemaining -= ret;<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return bytesRemaining &lt;= 0;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.789"></a>
+<span class="sourceLineNo">790</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.790"></a>
+<span class="sourceLineNo">791</span>   * desirable but not absolutely necessary to read.<a name="line.791"></a>
+<span class="sourceLineNo">792</span>   *<a name="line.792"></a>
+<span class="sourceLineNo">793</span>   * @param in the input stream to read from<a name="line.793"></a>
+<span class="sourceLineNo">794</span>   * @param position the position within the stream from which to start reading<a name="line.794"></a>
+<span class="sourceLineNo">795</span>   * @param buf the buffer to read into<a name="line.795"></a>
+<span class="sourceLineNo">796</span>   * @param bufOffset the destination offset in the buffer<a name="line.796"></a>
+<span class="sourceLineNo">797</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.797"></a>
+<span class="sourceLineNo">798</span>   *     read<a name="line.798"></a>
+<span class="sourceLineNo">799</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.799"></a>
+<span class="sourceLineNo">800</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.800"></a>
+<span class="sourceLineNo">801</span>   *     was successful<a name="line.801"></a>
+<span class="sourceLineNo">802</span>   * @throws IOException if failed to read the necessary bytes<a name="line.802"></a>
+<span class="sourceLineNo">803</span>   */<a name="line.803"></a>
+<span class="sourceLineNo">804</span>  @VisibleForTesting<a name="line.804"></a>
+<span class="sourceLineNo">805</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.805"></a>
+<span class="sourceLineNo">806</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.806"></a>
+<span class="sourceLineNo">807</span>      throws IOException {<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    int bytesRead = 0;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>    while (bytesRead &lt; necessaryLen) {<a name="line.810"></a>
+<span class="sourceLineNo">811</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.811"></a>
+<span class="sourceLineNo">812</span>      if (ret &lt; 0) {<a name="line.812"></a>
+<span class="sourceLineNo">813</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.813"></a>
+<span class="sourceLineNo">814</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.814"></a>
+<span class="sourceLineNo">815</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.815"></a>
+<span class="sourceLineNo">816</span>            + "successfully read " + bytesRead);<a name="line.816"></a>
+<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
+<span class="sourceLineNo">818</span>      position += ret;<a name="line.818"></a>
+<span class="sourceLineNo">819</span>      bufOffset += ret;<a name="line.819"></a>
+<span class="sourceLineNo">820</span>      bytesRemaining -= ret;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>      bytesRead += ret;<a name="line.821"></a>
+<span class="sourceLineNo">822</span>    }<a name="line.822"></a>
+<span class="sourceLineNo">823</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.823"></a>
+<span class="sourceLineNo">824</span>  }<a name="line.824"></a>
+<span class="sourceLineNo">825</span><a name="line.825"></a>
+<span class="sourceLineNo">826</span>  /**<a name="line.826"></a>
+<span class="sourceLineNo">827</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.827"></a>
+<span class="sourceLineNo">828</span>   * is as follows:<a name="line.828"></a>
+<span class="sourceLineNo">829</span>   * &lt;ol&gt;<a name="line.829"></a>
+<span class="sourceLineNo">830</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.830"></a>
+<span class="sourceLineNo">831</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.831"></a>
+<span class="sourceLineNo">832</span>   * &lt;li&gt;Write your data into the stream.<a name="line.832"></a>
+<span class="sourceLineNo">833</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * store the serialized block into an external stream.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.835"></a>
+<span class="sourceLineNo">836</span>   * &lt;/ol&gt;<a name="line.836"></a>
+<span class="sourceLineNo">837</span>   * &lt;p&gt;<a name="line.837"></a>
+<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
+<span class="sourceLineNo">839</span>  static class Writer {<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    private enum State {<a name="line.840"></a>
+<span class="sourceLineNo">841</span>      INIT,<a name="line.841"></a>
+<span class="sourceLineNo">842</span>      WRITING,<a name="line.842"></a>
+<span class="sourceLineNo">843</span>      BLOCK_READY<a name="line.843"></a>
+<span class="sourceLineNo">844</span>    }<a name="line.844"></a>
+<span class="sourceLineNo">845</span><a name="line.845"></a>
+<span class="sourceLineNo">846</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.846"></a>
+<span class="sourceLineNo">847</span>    private State state = State.INIT;<a name="line.847"></a>
+<span class="sourceLineNo">848</span><a name="line.848"></a>
+<span class="sourceLineNo">849</span>    /** Data block encoder used for data blocks */<a name="line.849"></a>
+<span class="sourceLineNo">850</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.850"></a>
+<span class="sourceLineNo">851</span><a name="line.851"></a>
+<span class="sourceLineNo">852</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.852"></a>
+<span class="sourceLineNo">853</span><a name="line.853"></a>
+<span class="sourceLineNo">854</span>    /** block encoding context for non-data blocks*/<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.855"></a>
+<span class="sourceLineNo">856</span><a name="line.856"></a>
+<span class="sourceLineNo">857</span>    /**<a name="line.857"></a>
+<span class="sourceLineNo">858</span>     * The stream we use to accumulate data into a blo

<TRUNCATED>

[16/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlock;<a name="line.522"></a>
+<span c

<TRUNCATED>

[17/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
index bd3c59e..21e240a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
@@ -33,62 +33,62 @@
 <span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.io.FileOutputStream;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.io.ObjectInputStream;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.ObjectOutputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.io.Serializable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.nio.ByteBuffer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.ArrayList;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.Comparator;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.HashSet;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Iterator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.List;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.Map;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.NavigableSet;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.PriorityQueue;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.util.Set;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.concurrent.BlockingQueue;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.util.concurrent.ConcurrentMap;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.TimeUnit;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.util.concurrent.atomic.LongAdder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.concurrent.locks.Lock;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.conf.Configuration;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.util.StringUtils;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.slf4j.Logger;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.slf4j.LoggerFactory;<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.83"></a>
+<span class="sourceLineNo">028</span>import java.io.Serializable;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.nio.ByteBuffer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.ArrayList;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Comparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.HashSet;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Iterator;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.NavigableSet;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import java.util.PriorityQueue;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import java.util.Set;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import java.util.concurrent.BlockingQueue;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import java.util.concurrent.ConcurrentMap;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentSkipListSet;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import java.util.concurrent.Executors;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import java.util.concurrent.ScheduledExecutorService;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.TimeUnit;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.atomic.LongAdder;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.locks.Lock;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.conf.Configuration;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.io.hfile.BlockCache;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.BlockPriority;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.io.hfile.BlockType;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.io.hfile.CacheStats;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.io.hfile.Cacheable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.io.hfile.CachedBlock;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.hfile.HFileBlock;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.protobuf.ProtobufMagic;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.HasThread;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.IdReadWriteLock;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.UnsafeAvailChecker;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.util.StringUtils;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.Logger;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.slf4j.LoggerFactory;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;<a name="line.83"></a>
 <span class="sourceLineNo">084</span><a name="line.84"></a>
 <span class="sourceLineNo">085</span>/**<a name="line.85"></a>
 <span class="sourceLineNo">086</span> * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses<a name="line.86"></a>
@@ -172,1540 +172,1557 @@
 <span class="sourceLineNo">164</span>  private volatile boolean freeInProgress = false;<a name="line.164"></a>
 <span class="sourceLineNo">165</span>  private final Lock freeSpaceLock = new ReentrantLock();<a name="line.165"></a>
 <span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private UniqueIndexMap&lt;Integer&gt; deserialiserMap = new UniqueIndexMap&lt;&gt;();<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  private final LongAdder heapSize = new LongAdder();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /** Current number of cached elements */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  /** Cache access count (sequential ID) */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  boolean wait_when_cache = false;<a name="line.181"></a>
+<span class="sourceLineNo">167</span>  private final LongAdder realCacheSize = new LongAdder();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  private final LongAdder heapSize = new LongAdder();<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  /** Current number of cached elements */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private final LongAdder blockNumber = new LongAdder();<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /** Cache access count (sequential ID) */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private final AtomicLong accessCount = new AtomicLong();<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  private static final int DEFAULT_CACHE_WAIT_TIME = 50;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  // Used in test now. If the flag is false and the cache speed is very fast,<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  // bucket cache will skip some blocks when caching. If the flag is true, we<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  // will wait blocks flushed to IOEngine for some time when caching<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  boolean wait_when_cache = false;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.181"></a>
 <span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final BucketCacheStats cacheStats = new BucketCacheStats();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final String persistencePath;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private final long cacheCapacity;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Approximate block size */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final long blockSize;<a name="line.188"></a>
-<span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private final int ioErrorsTolerationDuration;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  // 1 min<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // reset after a successful read/write.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private volatile long ioErrorStartTime = -1;<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * &lt;p&gt;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @VisibleForTesting<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (nameComparison != 0) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            return nameComparison;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>          if (a.getOffset() == b.getOffset()) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            return 0;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return -1;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>          return 1;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      });<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.226"></a>
-<span class="sourceLineNo">227</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  // Allocate or free space for the block<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private BucketAllocator bucketAllocator;<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private float acceptableFactor;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private float minFactor;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private float extraFreeFactor;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** Single access bucket size */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  private float singleFactor;<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /** Multiple access bucket size */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private float multiFactor;<a name="line.246"></a>
-<span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  /** In-memory bucket size */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private float memoryFactor;<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      IOException {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.258"></a>
-<span class="sourceLineNo">259</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.259"></a>
-<span class="sourceLineNo">260</span>                     Configuration conf)<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      throws FileNotFoundException, IOException {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    long blockNumCapacity = capacity / blockSize;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // Enough for about 32TB of cache!<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.275"></a>
+<span class="sourceLineNo">183</span>  private final String persistencePath;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private final long cacheCapacity;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  /** Approximate block size */<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private final long blockSize;<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /** Duration of IO errors tolerated before we disable cache, 1 min as default */<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  private final int ioErrorsTolerationDuration;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  // 1 min<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public static final int DEFAULT_ERROR_TOLERATION_DURATION = 60 * 1000;<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  // Start time of first IO error when reading or writing IO Engine, it will be<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  // reset after a successful read/write.<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private volatile long ioErrorStartTime = -1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * A ReentrantReadWriteLock to lock on a particular block identified by offset.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * The purpose of this is to avoid freeing the block which is being read.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;p&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is limited so soft reference is the best choice here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @VisibleForTesting<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private final NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      new ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        @Override<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        public int compare(BlockCacheKey a, BlockCacheKey b) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          int nameComparison = a.getHfileName().compareTo(b.getHfileName());<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          if (nameComparison != 0) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            return nameComparison;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>          if (a.getOffset() == b.getOffset()) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            return 0;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          } else if (a.getOffset() &lt; b.getOffset()) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>            return -1;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          return 1;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      });<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  /** Statistics thread schedule pool (for heavy debugging, could remove) */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  // Allocate or free space for the block<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private BucketAllocator bucketAllocator;<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** Acceptable size of cache (no evictions if size &lt; acceptable) */<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private float acceptableFactor;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>  /** Minimum threshold of cache (when evicting, evict until size &lt; min) */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private float minFactor;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  /** Free this floating point factor of extra blocks when evicting. For example free the number of blocks requested * (1 + extraFreeFactor) */<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private float extraFreeFactor;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  /** Single access bucket size */<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  private float singleFactor;<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /** Multiple access bucket size */<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private float multiFactor;<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /** In-memory bucket size */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private float memoryFactor;<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      int writerThreadNum, int writerQLen, String persistencePath) throws FileNotFoundException,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      IOException {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,<a name="line.256"></a>
+<span class="sourceLineNo">257</span>                     int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,<a name="line.257"></a>
+<span class="sourceLineNo">258</span>                     Configuration conf)<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      throws FileNotFoundException, IOException {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    this.writerThreads = new WriterThread[writerThreadNum];<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    long blockNumCapacity = capacity / blockSize;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    if (blockNumCapacity &gt;= Integer.MAX_VALUE) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      // Enough for about 32TB of cache!<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    this.acceptableFactor = conf.getFloat(ACCEPT_FACTOR_CONFIG_NAME, DEFAULT_ACCEPT_FACTOR);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    this.minFactor = conf.getFloat(MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.extraFreeFactor = conf.getFloat(EXTRA_FREE_FACTOR_CONFIG_NAME, DEFAULT_EXTRA_FREE_FACTOR);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    this.singleFactor = conf.getFloat(SINGLE_FACTOR_CONFIG_NAME, DEFAULT_SINGLE_FACTOR);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.multiFactor = conf.getFloat(MULTI_FACTOR_CONFIG_NAME, DEFAULT_MULTI_FACTOR);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.memoryFactor = conf.getFloat(MEMORY_FACTOR_CONFIG_NAME, DEFAULT_MEMORY_FACTOR);<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>    sanityCheckConfigs();<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    sanityCheckConfigs();<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        ", memoryFactor: " + memoryFactor);<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheCapacity = capacity;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.persistencePath = persistencePath;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.blockSize = blockSize;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>    assert writerQueues.size() == writerThreads.length;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.294"></a>
+<span class="sourceLineNo">277</span>    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        ", memoryFactor: " + memoryFactor);<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>    this.cacheCapacity = capacity;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    this.persistencePath = persistencePath;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.blockSize = blockSize;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>    bucketAllocator = new BucketAllocator(capacity, bucketSizes);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      writerQueues.add(new ArrayBlockingQueue&lt;&gt;(writerQLen));<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
+<span class="sourceLineNo">290</span><a name="line.290"></a>
+<span class="sourceLineNo">291</span>    assert writerQueues.size() == writerThreads.length;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.ramCache = new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.294"></a>
 <span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>    this.backingMap = new ConcurrentHashMap&lt;&gt;((int) blockNumCapacity);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        retrieveFromFile(bucketSizes);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      } catch (IOException ioex) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        LOG.error("Can't restore from file because of", ioex);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      } catch (ClassNotFoundException cnfe) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        LOG.error("Can't restore from file in rebuild because can't deserialise",cnfe);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        throw new RuntimeException(cnfe);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final String threadName = Thread.currentThread().getName();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.cacheEnabled = true;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      writerThreads[i].setDaemon(true);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    startWriterThreads();<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    // every five minutes.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  private void sanityCheckConfigs() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * starting the threads.<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @VisibleForTesting<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected void startWriterThreads() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    for (WriterThread thread : writerThreads) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      thread.start();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">296</span>    if (ioEngine.isPersistent() &amp;&amp; persistencePath != null) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      try {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        retrieveFromFile(bucketSizes);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      } catch (IOException ioex) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        LOG.error("Can't restore from file[" + persistencePath + "] because of ", ioex);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    final String threadName = Thread.currentThread().getName();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.cacheEnabled = true;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    for (int i = 0; i &lt; writerThreads.length; ++i) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      writerThreads[i] = new WriterThread(writerQueues.get(i));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      writerThreads[i].setName(threadName + "-BucketCacheWriter-" + i);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      writerThreads[i].setDaemon(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    startWriterThreads();<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Run the statistics thread periodically to print the cache statistics log<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    // every five minutes.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    LOG.info("Started bucket cache; ioengine=" + ioEngineName +<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        ", capacity=" + StringUtils.byteDesc(capacity) +<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  private void sanityCheckConfigs() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    Preconditions.checkArgument(acceptableFactor &lt;= 1 &amp;&amp; acceptableFactor &gt;= 0, ACCEPT_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    Preconditions.checkArgument(minFactor &lt;= 1 &amp;&amp; minFactor &gt;= 0, MIN_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    Preconditions.checkArgument(minFactor &lt;= acceptableFactor, MIN_FACTOR_CONFIG_NAME + " must be &lt;= " + ACCEPT_FACTOR_CONFIG_NAME);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    Preconditions.checkArgument(extraFreeFactor &gt;= 0, EXTRA_FREE_FACTOR_CONFIG_NAME + " must be greater than 0.0");<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    Preconditions.checkArgument(singleFactor &lt;= 1 &amp;&amp; singleFactor &gt;= 0, SINGLE_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Preconditions.checkArgument(multiFactor &lt;= 1 &amp;&amp; multiFactor &gt;= 0, MULTI_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Preconditions.checkArgument(memoryFactor &lt;= 1 &amp;&amp; memoryFactor &gt;= 0, MEMORY_FACTOR_CONFIG_NAME + " must be between 0.0 and 1.0");<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Preconditions.checkArgument((singleFactor + multiFactor + memoryFactor) == 1, SINGLE_FACTOR_CONFIG_NAME + ", " +<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        MULTI_FACTOR_CONFIG_NAME + ", and " + MEMORY_FACTOR_CONFIG_NAME + " segments must add up to 1.0");<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * Called by the constructor to start the writer threads. Used by tests that need to override<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * starting the threads.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @VisibleForTesting<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  protected void startWriterThreads() {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    for (WriterThread thread : writerThreads) {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      thread.start();<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  boolean isCacheEnabled() {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    return this.cacheEnabled;<a name="line.349"></a>
 <span class="sourceLineNo">350</span>  }<a name="line.350"></a>
 <span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  @VisibleForTesting<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  boolean isCacheEnabled() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return this.cacheEnabled;<a name="line.354"></a>
+<span class="sourceLineNo">352</span>  @Override<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public long getMaxSize() {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    return this.cacheCapacity;<a name="line.354"></a>
 <span class="sourceLineNo">355</span>  }<a name="line.355"></a>
 <span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public long getMaxSize() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return this.cacheCapacity;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public String getIoEngine() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return ioEngine.toString();<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Get the IOEngine from the IO engine name<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @param ioEngineName<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param capacity<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   * @param persistencePath<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @return the IOEngine<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   * @throws IOException<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      throws IOException {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      // the compatibility<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return new ByteBufferIOEngine(capacity);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    } else {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      throw new IllegalArgumentException(<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Cache the block with the specified name and buffer.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @param cacheKey block's cache key<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param buf block buffer<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   */<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  @Override<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    cacheBlock(cacheKey, buf, false);<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  }<a name="line.401"></a>
-<span class="sourceLineNo">402</span><a name="line.402"></a>
-<span class="sourceLineNo">403</span>  /**<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * Cache the block with the specified name and buffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param cacheKey block's cache key<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @param cachedItem block buffer<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @param inMemory if block is in-memory<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  @Override<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * Cache the block to ramCache<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   * @param cacheKey block's cache key<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @param cachedItem block buffer<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param inMemory if block is in-memory<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * @param wait if true, blocking wait when queue is full<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      boolean wait) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    if (cacheEnabled) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>      } else {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      boolean inMemory, boolean wait) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!cacheEnabled) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      return;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    RAMQueueEntry re =<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.447"></a>
-<span class="sourceLineNo">448</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>     */<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    boolean successfulAddition = false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    if (wait) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      } catch (InterruptedException e) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        Thread.currentThread().interrupt();<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    } else {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      successfulAddition = bq.offer(re);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    }<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    if (!successfulAddition) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      ramCache.remove(cacheKey);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      cacheStats.failInsert();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } else {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      this.blockNumber.increment();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      blocksByHFile.add(cacheKey);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  }<a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Get the buffer of the block with the specified key.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   * @param key block's cache key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   */<a name="line.482"></a>
-<span class="sourceLineNo">483</span>  @Override<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      boolean updateCacheMetrics) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    if (!cacheEnabled) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      return null;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (re != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      if (updateCacheMetrics) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      re.access(accessCount.incrementAndGet());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      return re.getData();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    if (bucketEntry != null) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      long start = System.nanoTime();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      try {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        lock.readLock().lock();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // existence here.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>          // TODO : change this area - should be removed after server cells and<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // 12295 are available<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          int len = bucketEntry.getLength();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          if (LOG.isTraceEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>              bucketEntry.deserializerReference(this.deserialiserMap));<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          long timeTaken = System.nanoTime() - start;<a name="line.515"></a>
-<span class="sourceLineNo">516</span>          if (updateCacheMetrics) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.517"></a>
-<span class="sourceLineNo">518</span>            cacheStats.ioHit(timeTaken);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>            bucketEntry.incrementRefCountAndGet();<a name="line.521"></a>
-<span class="sourceLineNo">522</span>          }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.523"></a>
-<span class="sourceLineNo">524</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>            ioErrorStartTime = -1;<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          }<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          return cachedBlock;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      } catch (IOException ioex) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        LOG.error("Failed reading block " + key + " from bucket cache", ioex);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        checkIOErrorIsTolerated();<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      } finally {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        lock.readLock().unlock();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    if (!repeat &amp;&amp; updateCacheMetrics) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      cacheStats.miss(caching, key.isPrimary(), key.getBlockType());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    }<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    return null;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>  @VisibleForTesting<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    bucketAllocator.freeBlock(bucketEntry.offset());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    realCacheSize.add(-1 * bucketEntry.getLength());<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    blocksByHFile.remove(cacheKey);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    if (decrementBlockNumber) {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.blockNumber.decrement();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">357</span>  public String getIoEngine() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    return ioEngine.toString();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Get the IOEngine from the IO engine name<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param ioEngineName<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param capacity<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param persistencePath<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the IOEngine<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throws IOException {<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      // In order to make the usage simple, we only need the prefix 'files:' in<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      // document whether one or multiple file(s), but also support 'file:' for<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      // the compatibility<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          .split(FileIOEngine.FILE_DELIMITER);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      return new FileIOEngine(capacity, persistencePath != null, filePaths);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    } else if (ioEngineName.startsWith("offheap")) {<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      return new ByteBufferIOEngine(capacity);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    } else if (ioEngineName.startsWith("mmap:")) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      return new FileMmapEngine(ioEngineName.substring(5), capacity);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    } else {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      throw new IllegalArgumentException(<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          "Don't understand io engine name for cache- prefix with file:, files:, mmap: or offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Cache the block with the specified name and buffer.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheKey block's cache key<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * @param buf block buffer<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @Override<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    cacheBlock(cacheKey, buf, false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * Cache the block with the specified name and buffer.<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   * @param cacheKey block's cache key<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   * @param cachedItem block buffer<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param inMemory if block is in-memory<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  @Override<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    cacheBlockWithWait(cacheKey, cachedItem, inMemory, wait_when_cache);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * Cache the block to ramCache<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   * @param cacheKey block's cache key<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * @param cachedItem block buffer<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * @param inMemory if block is in-memory<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param wait if true, blocking wait when queue is full<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  private void cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean inMemory,<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      boolean wait) {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    if (cacheEnabled) {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      if (backingMap.containsKey(cacheKey) || ramCache.containsKey(cacheKey)) {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>        if (BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, cachedItem)) {<a name="line.420"></a>
+<span class="sourceLineNo">421</span>          cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      } else {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        cacheBlockWithWaitInternal(cacheKey, cachedItem, inMemory, wait);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  }<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  private void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cachedItem,<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      boolean inMemory, boolean wait) {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    if (!cacheEnabled) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      return;<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    LOG.trace("Caching key={}, item={}", cacheKey, cachedItem);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    // Stuff the entry into the RAM cache so it can get drained to the persistent store<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    RAMQueueEntry re =<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        new RAMQueueEntry(cacheKey, cachedItem, accessCount.incrementAndGet(), inMemory);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>     * Don't use ramCache.put(cacheKey, re) here. because there may be a existing entry with same<a name="line.439"></a>
+<span class="sourceLineNo">440</span>     * key in ramCache, the heap size of bucket cache need to update if replacing entry from<a name="line.440"></a>
+<span class="sourceLineNo">441</span>     * ramCache. But WriterThread will also remove entry from ramCache and update heap size, if<a name="line.441"></a>
+<span class="sourceLineNo">442</span>     * using ramCache.put(), It's possible that the removed entry in WriterThread is not the correct<a name="line.442"></a>
+<span class="sourceLineNo">443</span>     * one, then the heap size will mess up (HBASE-20789)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>     */<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    if (ramCache.putIfAbsent(cacheKey, re) != null) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      return;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    int queueNum = (cacheKey.hashCode() &amp; 0x7FFFFFFF) % writerQueues.size();<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    BlockingQueue&lt;RAMQueueEntry&gt; bq = writerQueues.get(queueNum);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    boolean successfulAddition = false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    if (wait) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      try {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        successfulAddition = bq.offer(re, DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      } catch (InterruptedException e) {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        Thread.currentThread().interrupt();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    } else {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>      successfulAddition = bq.offer(re);<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    if (!successfulAddition) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>      ramCache.remove(cacheKey);<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      cacheStats.failInsert();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    } else {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      this.blockNumber.increment();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      this.heapSize.add(cachedItem.heapSize());<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      blocksByHFile.add(cacheKey);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
+<span class="sourceLineNo">471</span>   * Get the buffer of the block with the specified key.<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @param key block's cache key<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * @param caching true if the caller caches blocks on cache misses<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * @param repeat Whether this is a repeat lookup for the same block<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param updateCacheMetrics Whether we should update cache metrics or not<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return buffer of specified cache key, or null if not in cache<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  @Override<a name="line.478"></a>
+<span class="sourceLineNo">479</span>  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      boolean updateCacheMetrics) {<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    if (!cacheEnabled) {<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      return null;<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    RAMQueueEntry re = ramCache.get(key);<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    if (re != null) {<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      if (updateCacheMetrics) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      }<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      re.access(accessCount.incrementAndGet());<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      return re.getData();<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    BucketEntry bucketEntry = backingMap.get(key);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    if (bucketEntry != null) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      long start = System.nanoTime();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      try {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        lock.readLock().lock();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        // We can not read here even if backingMap does contain the given key because its offset<a name="line.498"></a>
+<span class="sourceLineNo">499</span>        // maybe changed. If we lock BlockCacheKey instead of offset, then we can only check<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        // existence here.<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        if (bucketEntry.equals(backingMap.get(key))) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          // TODO : change this area - should be removed after server cells and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          // 12295 are available<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          int len = bucketEntry.getLength();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          if (LOG.isTraceEnabled()) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>            LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>          }<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len,<a name="line.508"></a>
+<span class="sourceLineNo">509</span>              bucketEntry.deserializerReference());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          long timeTaken = System.nanoTime() - start;<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          if (updateCacheMetrics) {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            cacheStats.ioHit(timeTaken);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>          if (cachedBlock.getMemoryType() == MemoryType.SHARED) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>            bucketEntry.incrementRefCountAndGet();<a name="line.516"></a>
+<span class="sourceLineNo">517</span>          }<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          bucketEntry.access(accessCount.incrementAndGet());<a name="line.518"></a>
+<span class="sourceLineNo">519</span>          if (this.ioErrorStartTime &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>            ioErrorStartTime = -1;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>          return cachedBlo

<TRUNCATED>

[44/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
index ae13b31..fa95c11 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1426">HFileBlock.FSReaderImpl</a>
+<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1430">HFileBlock.FSReaderImpl</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></pre>
 <div class="block">Reads version 2 HFile blocks from the filesystem.</div>
@@ -376,7 +376,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>streamWrapper</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1429">streamWrapper</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1433">streamWrapper</a></pre>
 <div class="block">The file system stream of the underlying <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> that
  does or doesn't do checksum validations in the filesystem</div>
 </li>
@@ -387,7 +387,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>encodedBlockDecodingCtx</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1431">encodedBlockDecodingCtx</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1435">encodedBlockDecodingCtx</a></pre>
 </li>
 </ul>
 <a name="defaultDecodingCtx">
@@ -396,7 +396,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>defaultDecodingCtx</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1434">defaultDecodingCtx</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1438">defaultDecodingCtx</a></pre>
 <div class="block">Default context used when BlockType != <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.</div>
 </li>
 </ul>
@@ -406,7 +406,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>prefetchedHeader</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1442">prefetchedHeader</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1446">prefetchedHeader</a></pre>
 <div class="block">Cache of the NEXT header after this. Check it is indeed next blocks header
  before using it. TODO: Review. This overread into next block to fetch
  next blocks header seems unnecessary given we usually get the block size
@@ -419,7 +419,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>fileSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1445">fileSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1449">fileSize</a></pre>
 <div class="block">The size of the file we are reading from, or -1 if unknown.</div>
 </li>
 </ul>
@@ -429,7 +429,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>hdrSize</h4>
-<pre>protected final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1449">hdrSize</a></pre>
+<pre>protected final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1453">hdrSize</a></pre>
 <div class="block">The size of the header</div>
 </li>
 </ul>
@@ -439,7 +439,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>hfs</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1452">hfs</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1456">hfs</a></pre>
 <div class="block">The filesystem used to access data</div>
 </li>
 </ul>
@@ -449,7 +449,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>fileContext</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1454">fileContext</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1458">fileContext</a></pre>
 </li>
 </ul>
 <a name="pathName">
@@ -458,7 +458,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>pathName</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1456">pathName</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1460">pathName</a></pre>
 </li>
 </ul>
 <a name="streamLock">
@@ -467,7 +467,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>streamLock</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Lock.html?is-external=true" title="class or interface in java.util.concurrent.locks">Lock</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1458">streamLock</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Lock.html?is-external=true" title="class or interface in java.util.concurrent.locks">Lock</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1462">streamLock</a></pre>
 </li>
 </ul>
 </li>
@@ -484,7 +484,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>FSReaderImpl</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1460">FSReaderImpl</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;stream,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1464">FSReaderImpl</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;stream,
              long&nbsp;fileSize,
              <a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a>&nbsp;hfs,
              org.apache.hadoop.fs.Path&nbsp;path,
@@ -502,7 +502,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FSReaderImpl</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1481">FSReaderImpl</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1485">FSReaderImpl</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
              long&nbsp;fileSize,
              <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)
       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -528,7 +528,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>blockRange</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1487">blockRange</a>(long&nbsp;startOffset,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1491">blockRange</a>(long&nbsp;startOffset,
                                            long&nbsp;endOffset)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#blockRange-long-long-">HFileBlock.FSReader</a></code></span></div>
 <div class="block">Creates a block iterator over the given portion of the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>.
@@ -553,7 +553,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>readAtOffset</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1535">readAtOffset</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1539">readAtOffset</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
                            byte[]&nbsp;dest,
                            int&nbsp;destOffset,
                            int&nbsp;size,
@@ -587,7 +587,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlockData</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1586">readBlockData</a>(long&nbsp;offset,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1590">readBlockData</a>(long&nbsp;offset,
                                 long&nbsp;onDiskSizeWithHeaderL,
                                 boolean&nbsp;pread,
                                 boolean&nbsp;updateMetrics)
@@ -616,7 +616,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>checkAndGetSizeAsInt</h4>
-<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1655">checkAndGetSizeAsInt</a>(long&nbsp;onDiskSizeWithHeaderL,
+<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1659">checkAndGetSizeAsInt</a>(long&nbsp;onDiskSizeWithHeaderL,
                                         int&nbsp;hdrSize)
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -633,7 +633,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyOnDiskSizeMatchesHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1671">verifyOnDiskSizeMatchesHeader</a>(int&nbsp;passedIn,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1675">verifyOnDiskSizeMatchesHeader</a>(int&nbsp;passedIn,
                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;headerBuf,
                                            long&nbsp;offset,
                                            boolean&nbsp;verifyChecksum)
@@ -652,7 +652,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>getCachedHeader</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1692">getCachedHeader</a>(long&nbsp;offset)</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1696">getCachedHeader</a>(long&nbsp;offset)</pre>
 <div class="block">Check atomic reference cache for this block's header. Cache only good if next
  read coming through is next in sequence in the block. We read next block's
  header on the tail of reading the previous block to save a seek. Otherwise,
@@ -673,7 +673,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheNextBlockHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1702">cacheNextBlockHeader</a>(long&nbsp;offset,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1706">cacheNextBlockHeader</a>(long&nbsp;offset,
                                   byte[]&nbsp;header,
                                   int&nbsp;headerOffset,
                                   int&nbsp;headerLength)</pre>
@@ -691,7 +691,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlockDataInternal</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1725">readBlockDataInternal</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;is,
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1729">readBlockDataInternal</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;is,
                                            long&nbsp;offset,
                                            long&nbsp;onDiskSizeWithHeaderL,
                                            boolean&nbsp;pread,
@@ -723,7 +723,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>setIncludesMemStoreTS</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1813">setIncludesMemStoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1817">setIncludesMemStoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#setIncludesMemStoreTS-boolean-">setIncludesMemStoreTS</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></code></dd>
@@ -736,7 +736,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>setDataBlockEncoder</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1818">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1822">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#setDataBlockEncoder-org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder-">setDataBlockEncoder</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></code></dd>
@@ -749,7 +749,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockDecodingContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1823">getBlockDecodingContext</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1827">getBlockDecodingContext</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#getBlockDecodingContext--">HFileBlock.FSReader</a></code></span></div>
 <div class="block">Get a decoder for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a> blocks from this file.</div>
 <dl>
@@ -764,7 +764,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>getDefaultBlockDecodingContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1828">getDefaultBlockDecodingContext</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1832">getDefaultBlockDecodingContext</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#getDefaultBlockDecodingContext--">HFileBlock.FSReader</a></code></span></div>
 <div class="block">Get the default decoder for blocks from this file.</div>
 <dl>
@@ -779,7 +779,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>validateChecksum</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1837">validateChecksum</a>(long&nbsp;offset,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1841">validateChecksum</a>(long&nbsp;offset,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;data,
                                  int&nbsp;hdrSize)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -799,7 +799,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>closeStreams</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1851">closeStreams</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1855">closeStreams</a>()
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#closeStreams--">HFileBlock.FSReader</a></code></span></div>
 <div class="block">Closes the backing streams</div>
@@ -817,7 +817,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>unbufferStream</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1856">unbufferStream</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1860">unbufferStream</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#unbufferStream--">HFileBlock.FSReader</a></code></span></div>
 <div class="block">To close the stream's socket. Note: This can be concurrently called from multiple threads and
  implementation should take care of thread safety.</div>
@@ -833,7 +833,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1869">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1873">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
index 66ee7a3..467b797 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1412">HFileBlock.PrefetchedHeader</a>
+<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1416">HFileBlock.PrefetchedHeader</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Data-structure to use caching the header of the NEXT block. Only works if next read
  that comes in here is next in sequence in this block.
@@ -217,7 +217,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>offset</h4>
-<pre>long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1413">offset</a></pre>
+<pre>long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1417">offset</a></pre>
 </li>
 </ul>
 <a name="header">
@@ -226,7 +226,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>header</h4>
-<pre>byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1414">header</a></pre>
+<pre>byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1418">header</a></pre>
 </li>
 </ul>
 <a name="buf">
@@ -235,7 +235,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>buf</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1415">buf</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1419">buf</a></pre>
 </li>
 </ul>
 </li>
@@ -252,7 +252,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PrefetchedHeader</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1412">PrefetchedHeader</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1416">PrefetchedHeader</a>()</pre>
 </li>
 </ul>
 </li>
@@ -269,7 +269,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1418">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1422">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
index 9b248ec..40ce202 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static enum <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.836">HFileBlock.Writer.State</a>
+<pre>private static enum <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.840">HFileBlock.Writer.State</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&gt;</pre>
 </li>
 </ul>
@@ -213,7 +213,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>INIT</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.837">INIT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.841">INIT</a></pre>
 </li>
 </ul>
 <a name="WRITING">
@@ -222,7 +222,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>WRITING</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.838">WRITING</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.842">WRITING</a></pre>
 </li>
 </ul>
 <a name="BLOCK_READY">
@@ -231,7 +231,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BLOCK_READY</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.839">BLOCK_READY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.843">BLOCK_READY</a></pre>
 </li>
 </ul>
 </li>
@@ -248,7 +248,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.836">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.840">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -268,7 +268,7 @@ for (HFileBlock.Writer.State c : HFileBlock.Writer.State.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.836">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.840">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
index 6079b85..0b3dd0d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.835">HFileBlock.Writer</a>
+<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.839">HFileBlock.Writer</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Unified version 2 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> block writer. The intended usage pattern
  is as follows:
@@ -459,7 +459,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>state</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.843">state</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.847">state</a></pre>
 <div class="block">Writer state. Used to ensure the correct usage protocol.</div>
 </li>
 </ul>
@@ -469,7 +469,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>dataBlockEncoder</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.846">dataBlockEncoder</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.850">dataBlockEncoder</a></pre>
 <div class="block">Data block encoder used for data blocks</div>
 </li>
 </ul>
@@ -479,7 +479,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>dataBlockEncodingCtx</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.848">dataBlockEncodingCtx</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.852">dataBlockEncodingCtx</a></pre>
 </li>
 </ul>
 <a name="defaultBlockEncodingCtx">
@@ -488,7 +488,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>defaultBlockEncodingCtx</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.851">defaultBlockEncodingCtx</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.855">defaultBlockEncodingCtx</a></pre>
 <div class="block">block encoding context for non-data blocks</div>
 </li>
 </ul>
@@ -498,7 +498,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>baosInMemory</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.859">baosInMemory</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.863">baosInMemory</a></pre>
 <div class="block">The stream we use to accumulate data into a block in an uncompressed format.
  We reset this stream at the end of each block and reuse it. The
  header is written as the first <a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#HFILEBLOCK_HEADER_SIZE"><code>HConstants.HFILEBLOCK_HEADER_SIZE</code></a> bytes into this
@@ -511,7 +511,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>blockType</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.866">blockType</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.870">blockType</a></pre>
 <div class="block">Current block type. Set in <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#startWriting-org.apache.hadoop.hbase.io.hfile.BlockType-"><code>startWriting(BlockType)</code></a>. Could be
  changed in <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#finishBlock--"><code>finishBlock()</code></a> from <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#DATA"><code>BlockType.DATA</code></a>
  to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.</div>
@@ -523,7 +523,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>userDataStream</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.872">userDataStream</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.876">userDataStream</a></pre>
 <div class="block">A stream that we write uncompressed bytes to, which compresses them and
  writes them to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#baosInMemory"><code>baosInMemory</code></a>.</div>
 </li>
@@ -534,7 +534,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>unencodedDataSizeWritten</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.876">unencodedDataSizeWritten</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.880">unencodedDataSizeWritten</a></pre>
 </li>
 </ul>
 <a name="encodedDataSizeWritten">
@@ -543,7 +543,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>encodedDataSizeWritten</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.880">encodedDataSizeWritten</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.884">encodedDataSizeWritten</a></pre>
 </li>
 </ul>
 <a name="onDiskBlockBytesWithHeader">
@@ -552,7 +552,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>onDiskBlockBytesWithHeader</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.887">onDiskBlockBytesWithHeader</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.891">onDiskBlockBytesWithHeader</a></pre>
 <div class="block">Bytes to be written to the file system, including the header. Compressed
  if compression is turned on. It also includes the checksum data that
  immediately follows the block data. (header + data + checksums)</div>
@@ -564,7 +564,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>onDiskChecksum</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.895">onDiskChecksum</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.899">onDiskChecksum</a></pre>
 <div class="block">The size of the checksum data on disk. It is used only if data is
  not compressed. If data is compressed, then the checksums are already
  part of onDiskBytesWithHeader. If data is uncompressed, then this
@@ -577,7 +577,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>startOffset</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.901">startOffset</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.905">startOffset</a></pre>
 <div class="block">Current block's start offset in the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>. Set in
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData-org.apache.hadoop.fs.FSDataOutputStream-"><code>writeHeaderAndData(FSDataOutputStream)</code></a>.</div>
 </li>
@@ -588,7 +588,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>prevOffsetByType</h4>
-<pre>private&nbsp;long[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.907">prevOffsetByType</a></pre>
+<pre>private&nbsp;long[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.911">prevOffsetByType</a></pre>
 <div class="block">Offset of previous block by block type. Updated when the next block is
  started.</div>
 </li>
@@ -599,7 +599,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>prevOffset</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.910">prevOffset</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.914">prevOffset</a></pre>
 <div class="block">The offset of the previous block of the same type</div>
 </li>
 </ul>
@@ -609,7 +609,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fileContext</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.912">fileContext</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.916">fileContext</a></pre>
 <div class="block">Meta data that holds information about the hfileblock</div>
 </li>
 </ul>
@@ -627,7 +627,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>Writer</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.917">Writer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;dataBlockEncoder,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.921">Writer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;dataBlockEncoder,
               <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -649,7 +649,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>startWriting</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.947">startWriting</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;newBlockType)
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.951">startWriting</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;newBlockType)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts writing into the block. The previous block's data is discarded.</div>
 <dl>
@@ -666,7 +666,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.978">write</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.982">write</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the Cell to this block</div>
 <dl>
@@ -683,7 +683,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUserDataStream</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.993">getUserDataStream</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.997">getUserDataStream</a>()</pre>
 <div class="block">Returns the stream for the user to write to. The block writer takes care
  of handling compression and buffering for caching on write. Can only be
  called in the "writing" state.</div>
@@ -699,7 +699,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ensureBlockReady</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1002">ensureBlockReady</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1006">ensureBlockReady</a>()
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Transitions the block writer from the "writing" state to the "block
  ready" state.  Does nothing if a block is already finished.</div>
@@ -715,7 +715,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>finishBlock</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1020">finishBlock</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1024">finishBlock</a>()
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Finish up writing of the block.
  Flushes the compressing stream (if using compression), fills out the header,
@@ -733,7 +733,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>putHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1074">putHeader</a>(byte[]&nbsp;dest,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1078">putHeader</a>(byte[]&nbsp;dest,
                        int&nbsp;offset,
                        int&nbsp;onDiskSize,
                        int&nbsp;uncompressedSize,
@@ -755,7 +755,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>putHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1085">putHeader</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a>&nbsp;dest,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1089">putHeader</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a>&nbsp;dest,
                        int&nbsp;onDiskSize,
                        int&nbsp;uncompressedSize,
                        int&nbsp;onDiskDataSize)</pre>
@@ -767,7 +767,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writeHeaderAndData</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1098">writeHeaderAndData</a>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1102">writeHeaderAndData</a>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData-org.apache.hadoop.fs.FSDataOutputStream-"><code>writeHeaderAndData(FSDataOutputStream)</code></a>, but records
  the offset of this block so that it can be referenced in the next block
@@ -786,7 +786,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>finishBlockAndWriteHeaderAndData</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1119">finishBlockAndWriteHeaderAndData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1123">finishBlockAndWriteHeaderAndData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the header and the compressed data of this block (or uncompressed
  data when not using compression) into the given stream. Can be called in
@@ -806,7 +806,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getHeaderAndDataForTest</h4>
-<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1138">getHeaderAndDataForTest</a>()
+<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1142">getHeaderAndDataForTest</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns the header or the compressed data (or uncompressed data when not
  using compression) as a byte array. Can be called in the "writing" state
@@ -827,7 +827,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>release</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1155">release</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1159">release</a>()</pre>
 <div class="block">Releases resources used by this writer.</div>
 </li>
 </ul>
@@ -837,7 +837,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithoutHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1174">getOnDiskSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1178">getOnDiskSizeWithoutHeader</a>()</pre>
 <div class="block">Returns the on-disk size of the data portion of the block. This is the
  compressed size if compression is enabled. Can only be called in the
  "block ready" state. Header is not compressed, and its size is not
@@ -854,7 +854,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1187">getOnDiskSizeWithHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1191">getOnDiskSizeWithHeader</a>()</pre>
 <div class="block">Returns the on-disk size of the block. Can only be called in the
  "block ready" state.</div>
 <dl>
@@ -870,7 +870,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedSizeWithoutHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1195">getUncompressedSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1199">getUncompressedSizeWithoutHeader</a>()</pre>
 <div class="block">The uncompressed size of the block data. Does not include header size.</div>
 </li>
 </ul>
@@ -880,7 +880,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedSizeWithHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1203">getUncompressedSizeWithHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1207">getUncompressedSizeWithHeader</a>()</pre>
 <div class="block">The uncompressed size of the block data, including header size.</div>
 </li>
 </ul>
@@ -890,7 +890,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isWriting</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1209">isWriting</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1213">isWriting</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if a block is being written</dd>
@@ -903,7 +903,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>encodedBlockSizeWritten</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1220">encodedBlockSizeWritten</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1224">encodedBlockSizeWritten</a>()</pre>
 <div class="block">Returns the number of bytes written into the current block so far, or
  zero if not writing the block at the moment. Note that this will return
  zero in the "block ready" state as well.</div>
@@ -919,7 +919,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>blockSizeWritten</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1233">blockSizeWritten</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1237">blockSizeWritten</a>()</pre>
 <div class="block">Returns the number of bytes written into the current block so far, or
  zero if not writing the block at the moment. Note that this will return
  zero in the "block ready" state as well.</div>
@@ -935,7 +935,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cloneUncompressedBufferWithHeader</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1247">cloneUncompressedBufferWithHeader</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1251">cloneUncompressedBufferWithHeader</a>()</pre>
 <div class="block">Clones the header followed by the uncompressed data, even if using
  compression. This is needed for storing uncompressed blocks in the block
  cache. Can be called in the "writing" state or the "block ready" state.
@@ -952,7 +952,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cloneOnDiskBufferWithHeader</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1267">cloneOnDiskBufferWithHeader</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1271">cloneOnDiskBufferWithHeader</a>()</pre>
 <div class="block">Clones the header followed by the on-disk (compressed/encoded/encrypted) data. This is
  needed for storing packed blocks in the block cache. Expects calling semantics identical to
  <code>#getUncompressedBufferWithHeader()</code>. Returns only the header and data,
@@ -969,7 +969,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>expectState</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1272">expectState</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;expectedState)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1276">expectState</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;expectedState)</pre>
 </li>
 </ul>
 <a name="writeBlock-org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable-org.apache.hadoop.fs.FSDataOutputStream-">
@@ -978,7 +978,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writeBlock</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1289">writeBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&nbsp;bw,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1293">writeBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&nbsp;bw,
                 org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)
          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Takes the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock.BlockWritable</code></a> instance, creates a new block of
@@ -1000,7 +1000,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getBlockForCaching</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1308">getBlockForCaching</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1312">getBlockForCaching</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Creates a new HFileBlock. Checksums have already been validated, so
  the byte buffer passed into the constructor of this newly created
  block does not have checksum data even though the header minor


[26/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
index b7b4236..3d1edb3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
@@ -259,1863 +259,1867 @@
 <span class="sourceLineNo">251</span>   * + Metadata!  + &lt;= See note on BLOCK_METADATA_SPACE above.<a name="line.251"></a>
 <span class="sourceLineNo">252</span>   * ++++++++++++++<a name="line.252"></a>
 <span class="sourceLineNo">253</span>   * &lt;/code&gt;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer)<a name="line.254"></a>
+<span class="sourceLineNo">254</span>   * @see #serialize(ByteBuffer, boolean)<a name="line.254"></a>
 <span class="sourceLineNo">255</span>   */<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    @Override<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      // The buf has the file block followed by block metadata.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      ByteBuff newByteBuff;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      if (reuse) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        newByteBuff = buf.slice();<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      } else {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        int len = buf.limit();<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      buf.position(buf.limit());<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      long offset = buf.getLong();<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      HFileBlock hFileBlock =<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      return hFileBlock;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    public int getDeserialiserIdentifier() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return DESERIALIZER_IDENTIFIER;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // Used only in tests<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  };<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  static {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    DESERIALIZER_IDENTIFIER =<a name="line.298"></a>
-<span class="sourceLineNo">299</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  /**<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.303"></a>
-<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private HFileBlock(HFileBlock that) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this(that, false);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>  /**<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   * param.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>   */<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    if (bufCopy) {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    } else {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.buf = that.buf.duplicate();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * is used only while writing blocks and caching,<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   *<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.331"></a>
+<span class="sourceLineNo">256</span>  public static final CacheableDeserializer&lt;Cacheable&gt; BLOCK_DESERIALIZER = new BlockDeserializer();<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  public static final class BlockDeserializer implements CacheableDeserializer&lt;Cacheable&gt; {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    private BlockDeserializer() {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    @Override<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        throws IOException {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      // The buf has the file block followed by block metadata.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      // Set limit to just before the BLOCK_METADATA_SPACE then rewind.<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      // Get a new buffer to pass the HFileBlock for it to 'own'.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      ByteBuff newByteBuff;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      if (reuse) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>        newByteBuff = buf.slice();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      } else {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        int len = buf.limit();<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        newByteBuff.put(0, buf, buf.position(), len);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      buf.position(buf.limit());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      boolean usesChecksum = buf.get() == (byte) 1;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      long offset = buf.getLong();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int nextBlockOnDiskSize = buf.getInt();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      HFileBlock hFileBlock =<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      return hFileBlock;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    @Override<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    public int getDeserialiserIdentifier() {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      return DESERIALIZER_IDENTIFIER;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    @Override<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      // Used only in tests<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private static final int DESERIALIZER_IDENTIFIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  static {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    DESERIALIZER_IDENTIFIER =<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        CacheableDeserializerIdManager.registerDeserializer(BLOCK_DESERIALIZER);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  }<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /**<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private HFileBlock(HFileBlock that) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    this(that, false);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Copy constructor. Creates a shallow/deep copy of {@code that}'s buffer as per the boolean<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * param.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private HFileBlock(HFileBlock that, boolean bufCopy) {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    init(that.blockType, that.onDiskSizeWithoutHeader,<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        that.uncompressedSizeWithoutHeader, that.prevBlockOffset,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        that.offset, that.onDiskDataSizeWithHeader, that.nextBlockOnDiskSize, that.fileContext);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    if (bufCopy) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      this.buf = new SingleByteBuff(ByteBuffer.wrap(that.buf.toBytes(0, that.buf.limit())));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    } else {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      this.buf = that.buf.duplicate();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    }<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * is used only while writing blocks and caching,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * and is sitting in a byte buffer and we want to stuff the block into cache.<a name="line.331"></a>
 <span class="sourceLineNo">332</span>   *<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @param offset the file offset the block was read from<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param fileContext HFile meta data<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @VisibleForTesting<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      HFileContext fileContext) {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    this.buf = new SingleByteBuff(b);<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    if (fillHeader) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      overwriteHeader();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    this.buf.rewind();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * to that point.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    buf.rewind();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    final BlockType blockType = BlockType.read(buf);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    final int uncompressedSizeWithoutHeader =<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    int onDiskDataSizeWithHeader;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    if (usesHBaseChecksum) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    } else {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    fileContext = fileContextBuilder.build();<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    this.memType = memType;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    this.offset = offset;<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    this.buf = buf;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    this.buf.rewind();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   * Called from constructors.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      HFileContext fileContext) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    this.blockType = blockType;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    this.offset = offset;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    this.fileContext = fileContext;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  /**<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   * Parse total on disk size including header and checksum.<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.420"></a>
-<span class="sourceLineNo">421</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>   * @return Size of the block with header included.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   */<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      boolean verifyChecksum) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      headerSize(verifyChecksum);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * a read of the next block when scanning or running over a file.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  int getNextBlockOnDiskSize() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    return nextBlockOnDiskSize;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>  public BlockType getBlockType() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    return blockType;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  short getDataBlockEncodingId() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    return buf.getShort(headerSize());<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  }<a name="line.451"></a>
-<span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>  /**<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * @return the on-disk size of header + data part + checksum.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  public int getOnDiskSizeWithHeader() {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  int getOnDiskSizeWithoutHeader() {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    return onDiskSizeWithoutHeader;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
-<span class="sourceLineNo">466</span><a name="line.466"></a>
-<span class="sourceLineNo">467</span>  /**<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   */<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   int getUncompressedSizeWithoutHeader() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    return uncompressedSizeWithoutHeader;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /**<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   *         -1 if unknown<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  long getPrevBlockOffset() {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    return prevBlockOffset;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  /**<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * is modified as side-effect.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   */<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  private void overwriteHeader() {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    buf.rewind();<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    blockType.write(buf);<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    buf.putLong(prevBlockOffset);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  }<a name="line.497"></a>
-<span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * Returns a buffer that does not include the header or checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   *<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    ByteBuff dup = getBufferReadOnly();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    // Now set it up so Buffer spans content only -- no header or no checksums.<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return dup.position(headerSize()).limit(buf.limit() - totalChecksumBytes()).slice();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Returns a read-only duplicate of the buffer this block stores internally ready to be read.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Clients must not modify the buffer object though they may set position and limit on the<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * returned buffer since we pass back a duplicate. This method has to be public because it is used<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * filter lookup, but has to be used with caution. Buffer holds header, block content,<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * and any follow-on checksums if present.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   *<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @return the buffer of this block for read-only operations<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  public ByteBuff getBufferReadOnly() {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    // TODO: ByteBuf does not support asReadOnlyBuffer(). Fix.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    ByteBuff dup = this.buf.duplicate();<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    assert dup.position() == 0;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return dup;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  @VisibleForTesting<a name="line.527"></a>
-<span class="sourceLineNo">528</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String fieldName) throws IOException {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (valueFromBuf != valueFromField) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      throws IOException {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    if (valueFromBuf != valueFromField) {<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.540"></a>
-<span class="sourceLineNo">541</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  }<a name="line.543"></a>
-<span class="sourceLineNo">544</span><a name="line.544"></a>
-<span class="sourceLineNo">545</span>  /**<a name="line.545"></a>
-<span class="sourceLineNo">546</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * This function is primary for testing and debugging, and is not<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * Used by tests only.<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   */<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  @VisibleForTesting<a name="line.553"></a>
-<span class="sourceLineNo">554</span>  void sanityCheck() throws IOException {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    // Duplicate so no side-effects<a name="line.555"></a>
-<span class="sourceLineNo">556</span>    ByteBuff dup = this.buf.duplicate().rewind();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    sanityCheckAssertion(BlockType.read(dup), blockType);<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    sanityCheckAssertion(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    sanityCheckAssertion(dup.getInt(), uncompressedSizeWithoutHeader,<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        "uncompressedSizeWithoutHeader");<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    sanityCheckAssertion(dup.getLong(), prevBlockOffset, "prevBlockOffset");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      sanityCheckAssertion(dup.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      sanityCheckAssertion(dup.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.567"></a>
-<span class="sourceLineNo">568</span>          "bytesPerChecksum");<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      sanityCheckAssertion(dup.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>    int cksumBytes = totalChecksumBytes();<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (dup.limit() != expectedBufLimit) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      throw new AssertionError("Expected limit " + expectedBufLimit + ", got " + dup.limit());<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    int hdrSize = headerSize();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (dup.capacity() != expectedBufLimit &amp;&amp; dup.capacity() != expectedBufLimit + hdrSize) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      throw new AssertionError("Invalid buffer capacity: " + dup.capacity() +<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  @Override<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  public String toString() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    StringBuilder sb = new StringBuilder()<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      .append("[")<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      .append("blockType=").append(blockType)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      .append(", fileOffset=").append(offset)<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      .append(", headerSize=").append(headerSize())<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      .append(", onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      .append(", uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      .append(", prevBlockOffset=").append(prevBlockOffset)<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      .append(", isUseHBaseChecksum=").append(fileContext.isUseHBaseChecksum());<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      sb.append(", checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        .append(", bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        .append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      sb.append(", onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    String dataBegin = null;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    if (buf.hasArray()) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    } else {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    }<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    sb.append(", getOnDiskSizeWithHeader=").append(getOnDiskSizeWithHeader())<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      .append(", totalChecksumBytes=").append(totalChecksumBytes())<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      .append(", isUnpacked=").append(isUnpacked())<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      .append(", buf=[").append(buf).append("]")<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      .append(", dataBeginsWith=").append(dataBegin)<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      .append(", fileContext=").append(fileContext)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      .append(", nextBlockOnDiskSize=").append(nextBlockOnDiskSize)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>      .append("]");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    return sb.toString();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  }<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>  /**<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      // encryption details.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    ByteBuff dup = this.buf.duplicate();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    dup.position(this.headerSize());<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    dup = dup.slice();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      dup);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    return unpacked;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  /**<a name="line.656"></a>
-<span class="sourceLineNo">657</span>   * Always allocates a new buffer of the correct size. Copies header bytes<a name="line.657"></a>
-<span class="sourceLineNo">658</span>   * from the existing buffer. Does not change header fields.<a name="line.658"></a>
-<span class="sourceLineNo">659</span>   * Reserve room to keep checksum bytes too.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>   */<a name="line.660"></a>
-<span class="sourceLineNo">661</span>  private void allocateBuffer() {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int cksumBytes = totalChecksumBytes();<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    int headerSize = headerSize();<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // TODO we need consider allocating offheap here?<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);<a name="line.667"></a>
-<span class="sourceLineNo">668</span><a name="line.668"></a>
-<span class="sourceLineNo">669</span>    // Copy header bytes into newBuf.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    // newBuf is HBB so no issue in calling array()<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    buf.position(0);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    buf.get(newBuf.array(), newBuf.arrayOffset(), headerSize);<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    buf = new SingleByteBuff(newBuf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // set limit to exclude next block's header<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   * calculated heuristic, not tracked attribute of the block.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>   */<a name="line.682"></a>
-<span class="sourceLineNo">683</span>  public boolean isUnpacked() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>    final int cksumBytes = totalChecksumBytes();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    final int headerSize = headerSize();<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    final int bufCapacity = buf.capacity();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;<a name="line.688"></a>
-<span class="sourceLineNo">689</span>  }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>  /** An additional sanity-check in case no compression or encryption is being used. */<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  @VisibleForTesting<a name="line.692"></a>
-<span class="sourceLineNo">693</span>  void sanityCheckUncompressedSize() throws IOException {<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + totalChecksumBytes()) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      throw new IOException("Using no compression but "<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "<a name="line.696"></a>
-<span class="sourceLineNo">697</span>          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          + ", numChecksumbytes=" + totalChecksumBytes());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    }<a name="line.699"></a>
-<span class="sourceLineNo">700</span>  }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>  /**<a name="line.702"></a>
-<span class="sourceLineNo">703</span>   * Cannot be {@link #UNSET}. Must be a legitimate value. Used re-making the {@link BlockCacheKey} when<a name="line.703"></a>
-<span class="sourceLineNo">704</span>   * block is returned to the cache.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>   * @return the offset of this block in the file it was read from<a name="line.705"></a>
-<span class="sourceLineNo">706</span>   */<a name="line.706"></a>
-<span class="sourceLineNo">707</span>  long getOffset() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    if (offset &lt; 0) {<a name="line.708"></a>
-<span class="sourceLineNo">709</span>      throw new IllegalStateException("HFile block offset not initialized properly");<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    return offset;<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * @return a byte stream reading the data + checksum of this block<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  DataInputStream getByteStream() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    ByteBuff dup = this.buf.duplicate();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    dup.position(this.headerSize());<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new DataInputStream(new ByteBuffInputStream(dup));<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  public long heapSize() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    long size = ClassSize.align(<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        ClassSize.OBJECT +<a name="line.726"></a>
-<span class="sourceLineNo">727</span>        // Block type, multi byte buffer, MemoryType and meta references<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        4 * ClassSize.REFERENCE +<a name="line.728"></a>
-<span class="sourceLineNo">729</span>        // On-disk size, uncompressed size, and next block's on-disk size<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        // bytePerChecksum and onDiskDataSize<a name="line.730"></a>
-<span class="sourceLineNo">731</span>        4 * Bytes.SIZEOF_INT +<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        // This and previous block offset<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        2 * Bytes.SIZEOF_LONG +<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        // Heap size of the meta object. meta will be always not null.<a name="line.734"></a>
-<span class="sourceLineNo">735</span>        fileContext.heapSize()<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    );<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    if (buf != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      // Deep overhead of the byte buffer. Needs to be aligned separately.<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    return ClassSize.align(size);<a name="line.743"></a>
-<span class="sourceLineNo">744</span>  }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>  /**<a name="line.746"></a>
-<span class="sourceLineNo">747</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.747"></a>
-<span class="sourceLineNo">748</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.748"></a>
-<span class="sourceLineNo">749</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * number of "extra" bytes to also optionally read.<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   *<a name="line.751"></a>
-<span class="sourceLineNo">752</span>   * @param in the input stream to read from<a name="line.752"></a>
-<span class="sourceLineNo">753</span>   * @param buf the buffer to read into<a name="line.753"></a>
-<span class="sourceLineNo">754</span>   * @param bufOffset the destination offset in the buffer<a name="line.754"></a>
-<span class="sourceLineNo">755</span>   * @param necessaryLen the number of bytes that are absolutely necessary to read<a name="line.755"></a>
-<span class="sourceLineNo">756</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.756"></a>
-<span class="sourceLineNo">757</span>   * @return true if succeeded reading the extra bytes<a name="line.757"></a>
-<span class="sourceLineNo">758</span>   * @throws IOException if failed to read the necessary bytes<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   */<a name="line.759"></a>
-<span class="sourceLineNo">760</span>  static boolean readWithExtra(InputStream in, byte[] buf,<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      int bufOffset, int necessaryLen, int extraLen) throws IOException {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    while (bytesRemaining &gt; 0) {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      int ret = in.read(buf, bufOffset, bytesRemaining);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      if (ret == -1 &amp;&amp; bytesRemaining &lt;= extraLen) {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        // We could not read the "extra data", but that is OK.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>        break;<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      }<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (ret &lt; 0) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>        throw new IOException("Premature EOF from inputStream (read "<a name="line.770"></a>
-<span class="sourceLineNo">771</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.771"></a>
-<span class="sourceLineNo">772</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.772"></a>
-<span class="sourceLineNo">773</span>            + "successfully read "<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            + (necessaryLen + extraLen - bytesRemaining));<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      bufOffset += ret;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      bytesRemaining -= ret;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    }<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    return bytesRemaining &lt;= 0;<a name="line.779"></a>
-<span class="sourceLineNo">780</span>  }<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>  /**<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * Read from an input stream at least &lt;code&gt;necessaryLen&lt;/code&gt; and if possible,<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;code&gt;extraLen&lt;/code&gt; also if available. Analogous to<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but uses<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * positional read and specifies a number of "extra" bytes that would be<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * desirable but not absolutely necessary to read.<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   *<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @param in the input stream to read from<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   * @param position the position within the stream from which to start reading<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * @param buf the buffer to read into<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * @param bufOffset the destination offset in the buffer<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   * @param necessaryLen the number of bytes that are absolutely necessary to<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   *     read<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @param extraLen the number of extra bytes that would be nice to read<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   *     was successful<a name="line.797"></a>
-<span class="sourceLineNo">798</span>   * @throws IOException if failed to read the necessary bytes<a name="line.798"></a>
-<span class="sourceLineNo">799</span>   */<a name="line.799"></a>
-<span class="sourceLineNo">800</span>  @VisibleForTesting<a name="line.800"></a>
-<span class="sourceLineNo">801</span>  static boolean positionalReadWithExtra(FSDataInputStream in,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      throws IOException {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    int bytesRemaining = necessaryLen + extraLen;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>    int bytesRead = 0;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    while (bytesRead &lt; necessaryLen) {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      int ret = in.read(position, buf, bufOffset, bytesRemaining);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      if (ret &lt; 0) {<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        throw new IOException("Premature EOF from inputStream (positional read "<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            + "returned " + ret + ", was trying to read " + necessaryLen<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            + " necessary bytes and " + extraLen + " extra bytes, "<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            + "successfully read " + bytesRead);<a name="line.812"></a>
-<span class="sourceLineNo">813</span>      }<a name="line.813"></a>
-<span class="sourceLineNo">814</span>      position += ret;<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      bufOffset += ret;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      bytesRemaining -= ret;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      bytesRead += ret;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>    }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    return bytesRead != necessaryLen &amp;&amp; bytesRemaining &lt;= 0;<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>   * Unified version 2 {@link HFile} block writer. The intended usage pattern<a name="line.823"></a>
-<span class="sourceLineNo">824</span>   * is as follows:<a name="line.824"></a>
-<span class="sourceLineNo">825</span>   * &lt;ol&gt;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>   * &lt;li&gt;Construct an {@link HFileBlock.Writer}, providing a compression algorithm.<a name="line.826"></a>
-<span class="sourceLineNo">827</span>   * &lt;li&gt;Call {@link Writer#startWriting} and get a data stream to write to.<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * &lt;li&gt;Write your data into the stream.<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * &lt;li&gt;Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   * store the serialized block into an external stream.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * &lt;li&gt;Repeat to write more blocks.<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   * &lt;/ol&gt;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   * &lt;p&gt;<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   */<a name="line.834"></a>
-<span class="sourceLineNo">835</span>  static class Writer {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>    private enum State {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>      INIT,<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      WRITING,<a name="line.838"></a>
-<span class="sourceLineNo">839</span>      BLOCK_READY<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
-<span class="sourceLineNo">841</span><a name="line.841"></a>
-<span class="sourceLineNo">842</span>    /** Writer state. Used to ensure the correct usage protocol. */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    private State state = State.INIT;<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>    /** Data block encoder used for data blocks */<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    private final HFileDataBlockEncoder dataBlockEncoder;<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>    private HFileBlockEncodingContext dataBlockEncodingCtx;<a name="line.848"></a>
-<span class="sourceLineNo">849</span><a name="line.849"></a>
-<span class="sourceLineNo">850</span>    /** block encoding context for non-data blocks*/<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;<a name="line.851"></a>
-<span class="sourceLineNo">852</span><a name="line.852"></a>
-<span class="sourceLineNo">853</span>    /**<a name="line.853"></a>
-<span class="sourceLineNo">854</span>     * The stream we use to accumulate data into a block in an uncompressed format.<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * We reset this stream at the end of each block and reuse it. The<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes into this<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * stream.<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    private ByteArrayOutputStream baosInMemory;<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>    /**<a name="line.861"></a>
-<span class="sourceLineNo">862</span>     * Current block type. Set in {@link #startWriting(BlockType)}. Could be<a name="line.862"></a>
-<span class="sourceLineNo">863</span>     * changed in {@link #finishBlock()} from {@link BlockType#DATA}<a name="line.863"></a>
-<span class="sourceLineNo">864</span>     * to {@link BlockType#ENCODED_DATA}.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>     */<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    private BlockType blockType;<a name="line.866"></a>
-<span class="sourceLineNo">867</span><a name="line.867"></a>
-<span class="sourceLineNo">868</span>    /**<a name="line.868"></a>
-<span class="sourceLineNo">869</span>     * A stream that we write uncompressed bytes to, which compresses them and<a name="line.869"></a>
-<span class="sourceLineNo">870</span>     * writes them to {@link #baosInMemory}.<a name="line.870"></a>
-<span class="sourceLineNo">871</span>     */<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    private DataOutputStream userDataStream;<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // Size of actual data being written. Not considering the block encoding/compression. This<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    // includes the header size also.<a name="line.875"></a>
-<span class="sourceLineNo">876</span>    private int unencodedDataSizeWritten;<a name="line.876"></a>
+<span class="sourceLineNo">333</span>   * &lt;p&gt;TODO: The caller presumes no checksumming<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * required of this block instance since going into cache; checksum already verified on<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * underlying block data pulled in from filesystem. Is that correct? What if cache is SSD?<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   *<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * @param b block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * @param offset the file offset the block was read from<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * @param fileContext HFile meta data<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  @VisibleForTesting<a name="line.347"></a>
+<span class="sourceLineNo">348</span>  public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer b, boolean fillHeader,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      long offset, final int nextBlockOnDiskSize, int onDiskDataSizeWithHeader,<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      HFileContext fileContext) {<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.352"></a>
+<span class="sourceLineNo">353</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    this.buf = new SingleByteBuff(b);<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    if (fillHeader) {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      overwriteHeader();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    this.buf.rewind();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * to that point.<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @param buf Has header, content, and trailing checksums if present.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  HFileBlock(ByteBuff buf, boolean usesHBaseChecksum, MemoryType memType, final long offset,<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      final int nextBlockOnDiskSize, HFileContext fileContext) throws IOException {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    buf.rewind();<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    final BlockType blockType = BlockType.read(buf);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    final int onDiskSizeWithoutHeader = buf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    final int uncompressedSizeWithoutHeader =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>        buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // This constructor is called when we deserialize a block from cache and when we read a block in<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    // from the fs. fileCache is null when deserialized from cache so need to make up one.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    HFileContextBuilder fileContextBuilder = fileContext != null?<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        new HFileContextBuilder(fileContext): new HFileContextBuilder();<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    int onDiskDataSizeWithHeader;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    if (usesHBaseChecksum) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      // Use the checksum type and bytes per checksum from header, not from filecontext.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    } else {<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      fileContextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      fileContextBuilder.withBytesPerCheckSum(0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Need to fix onDiskDataSizeWithHeader; there are not checksums after-block-data<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + headerSize(usesHBaseChecksum);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    fileContext = fileContextBuilder.build();<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    assert usesHBaseChecksum == fileContext.isUseHBaseChecksum();<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    init(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        prevBlockOffset, offset, onDiskDataSizeWithHeader, nextBlockOnDiskSize, fileContext);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    this.memType = memType;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    this.offset = offset;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    this.buf = buf;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    this.buf.rewind();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  /**<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * Called from constructors.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private void init(BlockType blockType, int onDiskSizeWithoutHeader,<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      int uncompressedSizeWithoutHeader, long prevBlockOffset,<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      long offset, int onDiskDataSizeWithHeader, final int nextBlockOnDiskSize,<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      HFileContext fileContext) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>    this.blockType = blockType;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    this.offset = offset;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    this.nextBlockOnDiskSize = nextBlockOnDiskSize;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    this.fileContext = fileContext;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Parse total on disk size including header and checksum.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param headerBuf Header ByteBuffer. Presumed exact size of header.<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   * @param verifyChecksum true if checksum verification is in use.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   * @return Size of the block with header included.<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf,<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      boolean verifyChecksum) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      headerSize(verifyChecksum);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   * @return the on-disk size of the next block (including the header size and any checksums if<a name="line.435"></a>
+<span class="sourceLineNo">436</span>   * present) read by peeking into the next block's header; use as a hint when doing<a name="line.436"></a>
+<span class="sourceLineNo">437</span>   * a read of the next block when scanning or running over a file.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  int getNextBlockOnDiskSize() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>    return nextBlockOnDiskSize;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  public BlockType getBlockType() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    return blockType;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  short getDataBlockEncodingId() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return buf.getShort(headerSize());<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  /**<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   * @return the on-disk size of header + data part + checksum.<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  public int getOnDiskSizeWithHeader() {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.465"></a>
+<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  int getOnDiskSizeWithoutHeader() {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return onDiskSizeWithoutHeader;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   int getUncompressedSizeWithoutHeader() {<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    return uncompressedSizeWithoutHeader;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>  }<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>  /**<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   *         -1 if unknown<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   */<a name="line.481"></a>
+<span class="sourceLineNo">482</span>  long getPrevBlockOffset() {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    return prevBlockOffset;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /**<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * is modified as side-effect.<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   */<a name="line.489"></a>
+<span class="sourceLineNo">490</span>  private void overwriteHeader() {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    buf.rewind();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    blockType.write(buf);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    buf.putLong(prevBlockOffset);<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>  }<a name="line.501"></a>
+<span class="sourceLineNo">502</span><a name="line.502"></a>
+<span class="sourceLineNo">503</

<TRUNCATED>